commit fb981725737e2c225bef9143f7b5b69189375dba Author: dongchangxi <458593490@qq.com> Date: Wed Jan 15 09:31:42 2025 +0800 first commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..67e5d7f --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +blender/3.6 \ No newline at end of file diff --git a/auto_by_order_id.py b/auto_by_order_id.py new file mode 100644 index 0000000..4c3ab2c --- /dev/null +++ b/auto_by_order_id.py @@ -0,0 +1,324 @@ +from ctypes import util +import os, oss2, time, redis, requests, shutil, sys, subprocess, json, platform +from PIL import Image, ImageDraw, ImageFont +from retrying import retry +import atexit,platform +import get_preview_image +# if platform.system() == 'Windows': +# sys.path.append('libs\\') +sys.path.append('/home/acprint/code/libs/') +import common + + +#创建文本文件 +def creatDoingLog(order_id): + #在指定路径创建一条 order_id.txt 文本 + file_path = f"doingLog/{order_id}.txt" + if os.path.exists(file_path): + return + #创建 + with open(file_path, "w", encoding="utf-8") as file: + file.write("1") + +#移除指定的文本文件 +def removeDoingLog(order_id): + file_path = f"doingLog/{order_id}.txt" + if not os.path.exists(file_path): + return + os.remove(file_path) + +#遍历文件夹,返回指定的order_id +def get_order_id_by_txt(): + #遍历文件夹 + for file in os.listdir("doingLog"): + if file != "": + arrFile = file.split(".") + return arrFile[0] + return None + +def find_blender_bin_path(): + if platform.system() == 'Linux': return '/home/acprint/code/blender/blender' + base_path = 'C:\\Program Files\\Blender Foundation\\' + if os.path.exists(base_path): + for dir in os.listdir(base_path): + if dir.startswith('Blender'): + blender_bin_path = base_path + dir + '\\blender.exe' + return f'"{blender_bin_path}"' + else: + print('未找到blender安装目录') + exit(1) + +@retry(stop_max_attempt_number=10, wait_fixed=3000) +def down_obj_fromoss(pid, print_type=1, order_id=None,download_flag="print_build"): + # print_type:// 打印状态 1:正常打印 2:重打 3:加打,4: 样品 + print('开始下载obj文件...' 
, pid) + + if not order_id is None: + path = os.path.join(workdir, 'print', f'{pid}_{order_id}') + else: + path = os.path.join(workdir, 'print', pid) + if not os.path.exists(path): os.makedirs(path) + + # 根据前缀获取文件列表 + prefix = f'objs/print/{pid}/' + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + find = False + findTex1Jpg = False + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'{pid}.obj'): + find = True + + if download_flag == "print_build": + if filename != str(pid)+".obj" and filename != str(pid)+".mtl" and filename != str(pid)+"Tex1.jpg": + continue + localfile = os.path.join(path, filename) + res = oss_client.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + + if not find: + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'.obj'): + find = True + + # if download_flag == "print_build": + # if filename != str(pid)+".obj" and filename != str(pid)+".mtl" and filename != str(pid)+"Tex1.jpg": + # continue + localfile = os.path.join(path, filename) + res = oss_client.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + break + + if find: + os.rename(localfile, os.path.join(path,f"{pid}.obj")) + #重命名文件 + + #判断是否有Tex1.jpg + if not os.path.exists(os.path.join(path,f"{pid}Tex1.jpg")): + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'{pid}.jpg'): + findTex1Jpg = True + + # if download_flag == "print_build": + # if filename != str(pid)+".obj" and filename != str(pid)+".mtl" and filename != str(pid)+"Tex1.jpg": + # continue + localfile = os.path.join(path, filename) + res = oss_client.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + break + + if findTex1Jpg: + os.rename(localfile, os.path.join(path,f"{pid}Tex1.jpg")) + #重命名文件 + + # for file in filelist: + # filename = file.key.split('/')[-1] + # if filename == '': continue + # if filename.endswith(f'{pid}.obj'): + # find = True + # if filename.endswith('.obj'): + # print('找到其他obj文件,采用这个文件来生成需要的尺寸', file) + # shutil.copy(os.path.join(path, file), os.path.join(path, f'{pid}.obj')) + # find = True + # break + if not find: + print('找不到obj文件,跳过') + common.notify(f"{pid}-构建打印文件,找不到obj文件,异常,跳过该任务处理") + # sys.exit(1) + os.system(f'python auto_convert3d.py') + return + + # print('下载完成后静默10秒,等待文件写入完成') + # time.sleep(10) +#根据pid获取orderId +def getPidFromOrderId(orderId): + getPidFromOrderId_url = 'https://mp.api.suwa3d.com/api/printOrder/info' + print(f'{getPidFromOrderId_url}?id={orderId}') + res = requests.get(f'{getPidFromOrderId_url}?id={orderId}') + resCode = res.json()['code'] + if int(resCode) != 1000: + return -1 + print(res.text) + return res.json()['data'] + +def detect_obj4print(pid, orderId): + for file in os.listdir(os.path.join(workdir, 'print', f'{pid}_{orderId}')): + if file.endswith('.obj') and 'x' in file: + return True + + +def restart_current_process(new_command): + try: + # 保存新进程的命令 + command = new_command.split() + + # 启动新进程 + new_process = subprocess.Popen(command) + + # 打印新进程的PID + print(f"New process started with PID: {new_process.pid}") + + # 终止当前进程 + os._exit(0) + + except Exception as e: + print(f"An error occurred: {e}") + + +def make3d4print_task(orderId): + res = getPidFromOrderId(orderId) + 
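+    # getPidFromOrderId() returns -1 when the print-order lookup fails (the API answered with code != 1000), so bail out here.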
if res == -1: + print("查询打印订单信息失败,重新开启进程") + #os.system(f'python auto_convert3d.py') + return + pid = str(res['pid']) + if pid == "88985": + return + if pid == "": + return + #创建正在处理的文本内容 + creatDoingLog(orderId) + + print_type = res['print_type'] + digital_type = res['digital_type'] # 0: 只有手办 1: 只有数字模型 2: 手办+数字模型 + + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成待打印模型 start', ) + isFindObj = False + down_obj_fromoss(pid, print_type, orderId,download_flag="print_build") + # 获取程序运行当前目录 + resize_py_path = os.path.join(os.getcwd(), 'blender', 'resize_model.py') + print(f'{blenderbin} -b -P {resize_py_path} -- {orderId}') + os.system(f'{blenderbin} -b -P {resize_py_path} -- {orderId}') + if not detect_obj4print(pid, orderId): + print('obj文件生成异常,退出,重新执行') + restart_current_process("python auto_convert3d.py") + return + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成待打印模型 end') + + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 处理鼻孔 start') + if os.path.exists(f'{workdir}/print/{pid}_{orderId}/{pid}Tex1_old.jpg'): + print('已经处理过鼻孔,跳过') + else: + os.system(f'python fix_nose.py {pid}_{orderId}') + #上传jpg文件 + oss_client.put_object_from_file(f'objs/print/{pid}/{pid}Tex1.jpg', os.path.join(workdir, 'print', f'{pid}_{orderId}', f'{pid}Tex1.jpg')) + # oss_client.put_object_from_file(f'objs/print/{pid}/{pid}Tex1_old.jpg', os.path.join(workdir, 'print', f'{pid}_{orderId}', f'{pid}Tex1_old.jpg')) + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 处理鼻孔 end') + + # print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成脚底板二维码 start') + # os.system(f'{blenderbin} -b -P d:\\apps\\blender\\auto_dm.py -- {pid}_{orderId}') + # print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成脚底板二维码 end') + + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 上传生成的模型 start') + + path = os.path.join(workdir, 'print', f'{pid}_{orderId}') + + + # 如果指定文件夹目标文件存在,则先删除 + if os.path.exists(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')): + delete_files_in_directory(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')) + + for file in os.listdir(path): + # 跳过一些不需要上传的文件 + if file in [f'{pid}.png',f'{pid}_old.jpg', f'{pid}.obj', f'{pid}_decimate.glb', f'{pid}_decimate.obj', f'{pid}_decimate.mtl', f'{pid}Tex1_decimate.jpg', f'{pid}_original.obj', f'{pid}_original.mtl']: continue + print("当前目录",os.path.join(path, file)) + + #将文件移动到指定目录 + if not os.path.exists(os.path.join(workdir, f'complate/objs/{pid}/')): + os.makedirs(os.path.join(workdir, f'complate/objs/{pid}/'),mode=0o777, exist_ok=True) + + if not os.path.exists(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')): + os.makedirs(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/'),mode=0o777, exist_ok=True) + + shutil.move(os.path.join(path, file), os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')) + + + # oss_client.put_object_from_file(f'objs/print/{pid}/{file}', os.path.join(path, file)) + + # texture_file = os.path.join(path, f'{pid}Tex1_decimate.jpg') + # if os.path.exists(texture_file): + # img = Image.open(texture_file) + # img = img.resize((int(img.size[0] * 0.5), int(img.size[1] * 0.5))) + # img.save(texture_file, quality=90, optimize=True) + # print('resize texture file to 50% success') + + input = os.path.join(path, 
f'{pid}_decimate.obj') + output = os.path.join(path, f'{pid}_decimate.glb') + # os.system(f'gltfpack -c -i {input} -o {output}') + # oss_client.put_object_from_file(f'glbs/3d/{pid}.glb', output) + + shutil.rmtree(path, ignore_errors=True) + + print(f'{update_makeprintobj_status_url}?id={orderId}') + if digital_type == 1: + print('只有数字模型,不需要推送手办打印任务,仍有调用接口') + + res = requests.get(f'{update_makeprintobj_status_url}?id={orderId}') + print('更新打印状态:', res.text) + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 上传生成的模型 end') + removeDoingLog(orderId) + #生成封面图片 + print('小票封面图处理中....') + get_preview_image.createImage(pid) + #os.system(f'python get_preview_image.py {pid}') + + print(f"{pid}-已处理结束") + + +def delete_files_in_directory(directory): + for file_name in os.listdir(directory): + file_path = os.path.join(directory, file_name) + try: + if os.path.isfile(file_path): + os.remove(file_path) + print(f"Deleted: {file_path}") + except Exception as e: + print(f"Error deleting {file_path}: {e}") + + + + + +# def main(r): +# print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), '模型生成程序 start') +# while True: + #构建打印文件 + + +if __name__ == '__main__': + + # atexit.register(common.notify,"虚拟机,生成打印任务程序停止") + AccessKeyId = 'LTAI5tSReWm8hz7dSYxxth8f' + AccessKeySecret = '8ywTDF9upPAtvgXtLKALY2iMYHIxdS' + Endpoint = 'oss-cn-shanghai.aliyuncs.com' + Bucket = 'suwa3d-securedata' + oss_client = oss2.Bucket(oss2.Auth(AccessKeyId, AccessKeySecret), Endpoint, Bucket) + update_check_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/updateStatusToWaitingPlatformCheckingStatus' + update_team_check_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/updateStatusToWaitingTeamCheckingStatus' + update_status_printstatus_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/updateBuildPrintModelStatus' + update_makeprintobj_status_url = 'https://mp.api.suwa3d.com/api/printOrder/updateMakePrintObjSucceed' + getRepairInfo_url = 'https://repair.api.suwa3d.com/api/modelRepairOrder/teamCheckGLBInfo' + update_repair_status_url = 'https://repair.api.suwa3d.com/api/modelRepairOrder/updateStatusToWaitingTeamCheckingStatus' + if platform.system() == 'Windows': + workdir = 'E:\\' + else: + workdir = '/data/datasets/' + blenderbin = find_blender_bin_path() + + #r = redis.Redis(host='106.14.158.208', password='kcV2000', port=6379, db=6) + #E:\\complate/objs/147852_54579/ + # os.remove(os.path.join(workdir, f'complate/objs/147852_54579/')) + orderId = sys.argv[1] + print(sys.argv) + if orderId == 0: + print("请输入order_id") + exit() + + make3d4print_task(orderId) \ No newline at end of file diff --git a/auto_convert3d.py b/auto_convert3d.py new file mode 100644 index 0000000..22a5b3a --- /dev/null +++ b/auto_convert3d.py @@ -0,0 +1,368 @@ +from ctypes import util +import os, oss2, time, redis, requests, shutil, sys, subprocess, json, platform,random +from PIL import Image, ImageDraw, ImageFont +from retrying import retry +import atexit,platform +import get_preview_image +# if platform.system() == 'Windows': +# sys.path.append('libs\\') +sys.path.append('/home/acprint/code/libs/') +import common + + +#创建文本文件 +def creatDoingLog(order_id): + #在指定路径创建一条 order_id.txt 文本 + file_path = f"doingLog/{order_id}.txt" + if os.path.exists(file_path): + return + #创建 + with open(file_path, "w", encoding="utf-8") as file: + file.write("1") + +#移除指定的文本文件 +def removeDoingLog(order_id): + file_path = f"doingLog/{order_id}.txt" + if not os.path.exists(file_path): + return + os.remove(file_path) + 
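+# Note: creatDoingLog()/removeDoingLog() maintain a marker file doingLog/<order_id>.txt for the order
+# currently being processed, and get_order_id_by_txt() below scans that folder, so an interrupted run
+# still leaves a record of the order it was working on.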
+#遍历文件夹,返回指定的order_id +def get_order_id_by_txt(): + #遍历文件夹 + for file in os.listdir("doingLog"): + if file != "": + arrFile = file.split(".") + return arrFile[0] + return None + +def find_blender_bin_path(): + if platform.system() == 'Linux': return '/home/acprint/code/blender/blender' + base_path = 'C:\\Program Files\\Blender Foundation\\' + if os.path.exists(base_path): + for dir in os.listdir(base_path): + if dir.startswith('Blender'): + blender_bin_path = base_path + dir + '\\blender.exe' + return f'"{blender_bin_path}"' + else: + print('未找到blender安装目录') + exit(1) + +@retry(stop_max_attempt_number=10, wait_fixed=3000) +def down_obj_fromoss(pid, print_type=1, order_id=None,download_flag="print_build"): + # print_type:// 打印状态 1:正常打印 2:重打 3:加打,4: 样品 + print('开始下载obj文件...' , pid) + + if not order_id is None: + path = os.path.join(workdir, 'print', f'{pid}_{order_id}') + else: + path = os.path.join(workdir, 'print', pid) + if not os.path.exists(path): os.makedirs(path) + + # 根据前缀获取文件列表 + prefix = f'objs/print/{pid}/' + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + find = False + findTex1Jpg = False + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'{pid}.obj'): + find = True + + if download_flag == "print_build": + if filename != str(pid)+".obj" and filename != str(pid)+".mtl" and filename != str(pid)+"Tex1.jpg": + continue + localfile = os.path.join(path, filename) + res = oss_client.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + + if not find: + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'.obj'): + find = True + + # if download_flag == "print_build": + # if filename != str(pid)+".obj" and filename != str(pid)+".mtl" and filename != str(pid)+"Tex1.jpg": + # continue + localfile = os.path.join(path, filename) + res = oss_client.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + break + + if find: + os.rename(localfile, os.path.join(path,f"{pid}.obj")) + #重命名文件 + + #判断是否有Tex1.jpg + if not os.path.exists(os.path.join(path,f"{pid}Tex1.jpg")): + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'{pid}.jpg'): + findTex1Jpg = True + + # if download_flag == "print_build": + # if filename != str(pid)+".obj" and filename != str(pid)+".mtl" and filename != str(pid)+"Tex1.jpg": + # continue + localfile = os.path.join(path, filename) + res = oss_client.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + break + + if findTex1Jpg: + os.rename(localfile, os.path.join(path,f"{pid}Tex1.jpg")) + #重命名文件 + + # for file in filelist: + # filename = file.key.split('/')[-1] + # if filename == '': continue + # if filename.endswith(f'{pid}.obj'): + # find = True + # if filename.endswith('.obj'): + # print('找到其他obj文件,采用这个文件来生成需要的尺寸', file) + # shutil.copy(os.path.join(path, file), os.path.join(path, f'{pid}.obj')) + # find = True + # break + if not find: + print('找不到obj文件,跳过') + common.notify(f"{pid}-构建打印文件,找不到obj文件,异常,跳过该任务处理") + # sys.exit(1) + os.system(f'python auto_convert3d.py') + return + + # print('下载完成后静默10秒,等待文件写入完成') + # time.sleep(10) +#根据pid获取orderId +def getPidFromOrderId(orderId): + getPidFromOrderId_url = 'https://mp.api.suwa3d.com/api/printOrder/info' + 
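+    # Expected response envelope (assumed shape, inferred from the fields read below and in make3d4print_task):
+    #   {"code": 1000, "data": {"pid": ..., "print_type": ..., "digital_type": ...}}
+    # Any "code" other than 1000 is treated as a failed lookup and -1 is returned.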
print(f'{getPidFromOrderId_url}?id={orderId}') + res = requests.get(f'{getPidFromOrderId_url}?id={orderId}') + resCode = res.json()['code'] + if int(resCode) != 1000: + return -1 + print(res.text) + return res.json()['data'] + +def detect_obj4print(pid, orderId): + for file in os.listdir(os.path.join(workdir, 'print', f'{pid}_{orderId}')): + if file.endswith('.obj') and 'x' in file: + return True + + +def restart_current_process(new_command): + try: + # 保存新进程的命令 + command = new_command.split() + + # 启动新进程 + new_process = subprocess.Popen(command) + + # 打印新进程的PID + print(f"New process started with PID: {new_process.pid}") + + # 终止当前进程 + os._exit(0) + + except Exception as e: + print(f"An error occurred: {e}") + + +def make3d4print_task(r): + + + tempOrderId = "0" + try: + if r.llen('model:printOrder') == 0: + time.sleep(5) + + # tempOrderId = get_order_id_by_txt() + # if tempOrderId == "0": + # return + except Exception as e: + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), 'redis连接异常,重新连接') + print(e) + time.sleep(5) + r = redis.Redis(host='106.14.158.208', password='kcV2000', port=6379, db=6) + return + orderId = None + if tempOrderId == "0": + orderId = r.lpop('model:printOrder') + else: + orderId = tempOrderId + + if orderId is None: return + + if type(orderId) != str: + orderId = orderId.decode('utf-8') + # orderId = 56077 + res = getPidFromOrderId(orderId) + if res == -1: + print("查询打印订单信息失败,重新开启进程") + #os.system(f'python auto_convert3d.py') + return + pid = str(res['pid']) + if pid == "88985": + return + if pid == "": + return + #创建正在处理的文本内容 + creatDoingLog(orderId) + + print_type = res['print_type'] + digital_type = res['digital_type'] # 0: 只有手办 1: 只有数字模型 2: 手办+数字模型 + + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成待打印模型 start', ) + isFindObj = False + down_obj_fromoss(pid, print_type, orderId,download_flag="print_build") + # 获取程序运行当前目录 + resize_py_path = os.path.join(os.getcwd(), 'blender', 'resize_model.py') + print(f'{blenderbin} -b -P {resize_py_path} -- {orderId}') + os.system(f'{blenderbin} -b -P {resize_py_path} -- {orderId}') + if not detect_obj4print(pid, orderId): + print('obj文件生成异常,退出,重新执行') + restart_current_process("python auto_convert3d.py") + return + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成待打印模型 end') + + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 处理鼻孔 start') + if os.path.exists(f'{workdir}/print/{pid}_{orderId}/{pid}Tex1_old.jpg'): + print('已经处理过鼻孔,跳过') + else: + os.system(f'python fix_nose.py {pid}_{orderId}') + #上传jpg文件 + oss_client.put_object_from_file(f'objs/print/{pid}/{pid}Tex1.jpg', os.path.join(workdir, 'print', f'{pid}_{orderId}', f'{pid}Tex1.jpg')) + # oss_client.put_object_from_file(f'objs/print/{pid}/{pid}Tex1_old.jpg', os.path.join(workdir, 'print', f'{pid}_{orderId}', f'{pid}Tex1_old.jpg')) + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 处理鼻孔 end') + + # print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成脚底板二维码 start') + # os.system(f'{blenderbin} -b -P d:\\apps\\blender\\auto_dm.py -- {pid}_{orderId}') + # print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 生成脚底板二维码 end') + + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 上传生成的模型 start') + + path = os.path.join(workdir, 'print', f'{pid}_{orderId}') + + + # 如果指定文件夹目标文件存在,则先删除 + if 
os.path.exists(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')): + delete_files_in_directory(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')) + + for file in os.listdir(path): + # 跳过一些不需要上传的文件 + if file in [f'{pid}.png',f'{pid}_old.jpg', f'{pid}.obj', f'{pid}_decimate.glb', f'{pid}_decimate.obj', f'{pid}_decimate.mtl', f'{pid}Tex1_decimate.jpg', f'{pid}_original.obj', f'{pid}_original.mtl']: continue + print("当前目录",os.path.join(path, file)) + + #将文件移动到指定目录 + if not os.path.exists(os.path.join(workdir, f'complate/objs/{pid}/')): + os.makedirs(os.path.join(workdir, f'complate/objs/{pid}/'),mode=0o777, exist_ok=True) + + if not os.path.exists(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')): + os.makedirs(os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/'),mode=0o777, exist_ok=True) + + shutil.move(os.path.join(path, file), os.path.join(workdir, f'complate/objs/{pid}/order_{orderId}/')) + + + # oss_client.put_object_from_file(f'objs/print/{pid}/{file}', os.path.join(path, file)) + + # texture_file = os.path.join(path, f'{pid}Tex1_decimate.jpg') + # if os.path.exists(texture_file): + # img = Image.open(texture_file) + # img = img.resize((int(img.size[0] * 0.5), int(img.size[1] * 0.5))) + # img.save(texture_file, quality=90, optimize=True) + # print('resize texture file to 50% success') + + input = os.path.join(path, f'{pid}_decimate.obj') + output = os.path.join(path, f'{pid}_decimate.glb') + # os.system(f'gltfpack -c -i {input} -o {output}') + # oss_client.put_object_from_file(f'glbs/3d/{pid}.glb', output) + + shutil.rmtree(path, ignore_errors=True) + + print(f'{update_makeprintobj_status_url}?id={orderId}') + if digital_type == 1: + print('只有数字模型,不需要推送手办打印任务,仍有调用接口') + + res = requests.get(f'{update_makeprintobj_status_url}?id={orderId}') + print('更新打印状态:', res.text) + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), f'orderId:{orderId} pid:{pid} 上传生成的模型 end') + removeDoingLog(orderId) + #生成封面图片 + print('小票封面图处理中....') + get_preview_image.createImage(pid) + #os.system(f'python get_preview_image.py {pid}') + + print(f"{pid}-已处理结束") + + restart_current_process("python auto_convert3d.py") + + +def delete_files_in_directory(directory): + for file_name in os.listdir(directory): + file_path = os.path.join(directory, file_name) + try: + if os.path.isfile(file_path): + os.remove(file_path) + print(f"Deleted: {file_path}") + except Exception as e: + print(f"Error deleting {file_path}: {e}") + + +def create_redis_connection(): + """创建 Redis 连接,若连接失败则重试""" + while True: + try: + r = redis.Redis(host="106.14.158.208", password="kcV2000", port=6379, db=6) + # 尝试进行一次操作,检查连接是否有效 + r.ping() # ping 操作是一个简单的连接测试 + print("Redis连接成功!") + return r + except ConnectionError: + print("Redis连接失败,正在重试...") + time.sleep(5) + + +def main(r): + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), '模型生成程序 start') + while True: + try: + #随机休眠 1- 9 + time.sleep(random.uniform(1, 9)) + + + #构建打印文件 + make3d4print_task(r) + except Exception as e: + print(f'出现异常:{e}') + time.sleep(15) + r = create_redis_connection() + continue + +if __name__ == '__main__': + + # atexit.register(common.notify,"虚拟机,生成打印任务程序停止") + AccessKeyId = 'LTAI5tSReWm8hz7dSYxxth8f' + AccessKeySecret = '8ywTDF9upPAtvgXtLKALY2iMYHIxdS' + Endpoint = 'oss-cn-shanghai.aliyuncs.com' + Bucket = 'suwa3d-securedata' + oss_client = oss2.Bucket(oss2.Auth(AccessKeyId, AccessKeySecret), Endpoint, Bucket) + update_check_url = 
'https://mp.api.suwa3d.com/api/customerP3dLog/updateStatusToWaitingPlatformCheckingStatus' + update_team_check_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/updateStatusToWaitingTeamCheckingStatus' + update_status_printstatus_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/updateBuildPrintModelStatus' + update_makeprintobj_status_url = 'https://mp.api.suwa3d.com/api/printOrder/updateMakePrintObjSucceed' + getRepairInfo_url = 'https://repair.api.suwa3d.com/api/modelRepairOrder/teamCheckGLBInfo' + update_repair_status_url = 'https://repair.api.suwa3d.com/api/modelRepairOrder/updateStatusToWaitingTeamCheckingStatus' + if platform.system() == 'Windows': + workdir = 'E:\\' + else: + workdir = '/data/datasets/' + blenderbin = find_blender_bin_path() + + r = create_redis_connection() + #E:\\complate/objs/147852_54579/ + # os.remove(os.path.join(workdir, f'complate/objs/147852_54579/')) + main(r) \ No newline at end of file diff --git a/blender/E:/complate/objs/68724/foot_print_id_38962/68724Tex1.jpg b/blender/E:/complate/objs/68724/foot_print_id_38962/68724Tex1.jpg new file mode 100644 index 0000000..47848b3 Binary files /dev/null and b/blender/E:/complate/objs/68724/foot_print_id_38962/68724Tex1.jpg differ diff --git a/blender/auto_qrcode.py b/blender/auto_qrcode.py new file mode 100644 index 0000000..7295a5e --- /dev/null +++ b/blender/auto_qrcode.py @@ -0,0 +1,148 @@ +import bpy, sys, os, math, bmesh +# from PIL import Image, ImageDraw, ImageFont + +def gen_qrcode(pid): + fontHeightMax = 40 + fontsize = 1 + qr = qrcode.QRCode() + qr.border = 2 + qr.add_data(pid) + img = qr.make_image(fit=True) + img = img.transform((250, 294), Image.Transform.EXTENT, (0, 0, 250, 294), fillcolor='white') + + cwd = os.path.dirname(os.path.abspath(__file__)) + fontfile = os.path.join(cwd, 'fonts', 'Helvetica.ttf') + font = ImageFont.truetype(fontfile, fontsize) + while font.getsize(pid)[1] <= fontHeightMax and font.getsize(pid)[0] <= 240: + fontsize += 1 + font = ImageFont.truetype(fontfile, fontsize) + fontsize -= 1 + + captionx = (250 - font.getsize(pid)[0]) / 2 + draw = ImageDraw.Draw(img) + draw.text((captionx, 242), pid, font=font) + img.show() + img.save(f'{workdir}{pid}.png') + +def auto_rotate(pid): + # 坐标复位 + obj = bpy.context.selected_objects[0] + obj.rotation_euler[0] = 0 + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_VOLUME', center='MEDIAN') + bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_1', align_axis={'Y', 'Z'}) + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + # bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}_align_yz.obj') + + # 躺平到打印机排版需要的坐标与角度 + obj.rotation_euler = (math.radians(90), math.radians(90), 0) + bpy.ops.object.transform_apply(rotation=True) + # bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}_rotate_y90.obj') + + heights = {} + min_height = 999999 + min_i = 0 + max_height = -999999 + max_i = 0 + + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_VOLUME', center='MEDIAN') + bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_3', align_axis={'X', 'Y', 'Z'}) + + # 步进精度2旋转X轴到180度,找到Y轴最低点和最高点,其中最低点为打印 + step = 2 + i = 0 + while i <= 180: + obj.rotation_euler = (math.radians(step), 0, 0) + bpy.ops.object.transform_apply(rotation=True) + if obj.dimensions[1] < min_height: + min_height = obj.dimensions[1] + min_i = i + if obj.dimensions[1] > max_height: + max_height = obj.dimensions[1] + max_i = i + heights[i] = (obj.dimensions[0], obj.dimensions[1], 
obj.dimensions[2]) + print(i, heights[i]) + i += step + + obj.rotation_euler = (0, 0, 0) + bpy.ops.object.transform_apply(rotation=True) + obj.rotation_euler = (math.radians(min_i), 0, 0) + bpy.ops.object.transform_apply(rotation=True) + bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}.obj') + + # obj.rotation_euler = (0, 0, 0) + # bpy.ops.object.transform_apply(rotation=True) + # obj.rotation_euler = (math.radians(max_i), 0, 0) + # bpy.ops.object.transform_apply(rotation=True) + # bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}_maxz.obj') + print(f'最小高度: {min_height} @ {heights[min_i]}min_i:{min_i}' , f'最大高度: {max_height} @ {heights[max_i]}max_i:{max_i}') + +def cut_obj(pid): + # 根据定位用一个面切割模型 + offset = 45.5 + radian = math.radians(90) + bpy.ops.mesh.primitive_plane_add(size=200, enter_editmode=False, align='WORLD', location=(offset, 0, 0), rotation=(0, radian, 0), scale=(1, 1, 1)) + + # 布尔切割,保留交集切面 + bpy.ops.object.modifier_add(type='BOOLEAN') + bpy.context.object.modifiers["Boolean"].object = bpy.data.objects[pid] + bpy.context.object.modifiers["Boolean"].operation = 'INTERSECT' + bpy.context.object.modifiers["Boolean"].solver = 'FAST' + bpy.ops.object.modifier_apply(modifier="Boolean") + + # 拆分切割面为多个多边形,然后遍历多边形,找到最大的面积 + bpy.ops.mesh.separate(type='LOOSE') + + max_area = 0 + max_obj = None + for obj in bpy.data.objects: + if obj.type == 'MESH' and obj.name.startswith('Plane'): + area = obj.data.polygons[0].area + if area > max_area: + max_area = area + max_obj = obj + + # 选中最大面积的多边形,然后计算中心点 + bpy.ops.object.select_all(action='DESELECT') + max_obj.select_set(True) + bpy.context.view_layer.objects.active = max_obj + bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY') + + return max_obj + +def main(): + filename = f'{workdir}{pid}.obj' + print('正在处理:', filename) + bpy.ops.import_scene.obj(filepath=filename) + + auto_rotate(pid) + # gen_qrcode(pid) + + # 脚底切片,查找最大面积,计算中心点,计算坐标位置,怼入二维码贴片 + max_obj = cut_obj(pid) + bpy.ops.import_scene.obj(filepath=f'{workdir}qr.obj') + qr_obj = bpy.data.objects['Cube'] + shore_obj = bpy.data.objects['Cube.001'] + # bpy.data.objects['Cube'].origin_set(type='ORIGIN_GEOMETRY') + # bpy.data.objects['Cube.001'].origin_set(type='ORIGIN_GEOMETRY') + # bpy.data.objects['Cube.002'].origin_set(type='ORIGIN_GEOMETRY') + # bpy.data.objects['Cube.003'].origin_set(type='ORIGIN_GEOMETRY') + bpy.data.objects['Cube'] = (math.radians(90), math.radians(90), 0) + bpy.data.objects['Cube.001'].rotation_euler = (math.radians(90), math.radians(90), 0) + bpy.data.objects['Cube.002'].rotation_euler = (math.radians(90), math.radians(90), 0) + bpy.data.objects['Cube.003'].rotation_euler = (math.radians(90), math.radians(90), 0) + qr_obj.location = (max_obj.location[0] - qr_obj.dimensions[1] / 2 - shore_obj.dimensions[1]/2, max_obj.location[1], max_obj.location[2]) + shore_obj.location = (qr_obj.location[0] - shore_obj.dimensions[1]/2, max_obj.location[1], max_obj.location[2]) + bpy.data.objects['Cube.002'].location = (shore_obj.location[0], shore_obj.location[1]+0.2, shore_obj.location[2]) + bpy.data.objects['Cube.003'].location = (shore_obj.location[0], shore_obj.location[1]-0.2, shore_obj.location[2]) + + bpy.ops.object.transform_apply(rotation=True, location=True, scale=True) + + +if __name__ == '__main__': + workdir = '/home/water/Downloads/' + if len(sys.argv) - (sys.argv.index("--") +1) < 1: + print("Usage: blender -b -P auto_qrcode.py -- ") + sys.exit(1) + pid = sys.argv[sys.argv.index("--") + 1] + main() \ No newline at end of file diff --git a/blender/autofix.py 
b/blender/autofix.py new file mode 100644 index 0000000..e79689c --- /dev/null +++ b/blender/autofix.py @@ -0,0 +1,160 @@ +from math import radians +import sys, os, time, bpy, requests, json, bmesh +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +import platform +if platform.system() == 'Windows': + sys.path.append('e:\\libs\\') +else: + sys.path.append('/data/deploy/make3d/make2/libs/') +import config + +def get_obj_version(filename): + with open(filename, 'r') as f: + for line in f: + if line.startswith('# Engine version'): + return float(line.split(' ')[-1][1:].strip()[:3]) + exit(0) + return None + +def delete_lines_in_file(filename, count): + with open(filename, 'r') as f: + lines = f.readlines() + lines = lines[count:] + with open(filename, 'w') as f: + f.writelines(lines) + +def diff_minutes_and_seconds(start): + hours = int((time.time() - start) / 3600) + minutes = int((time.time() - start) / 60) + seconds = int((time.time() - start) % 60) + microseconds = int(int((time.time() - start) * 1000000) % 1000000 / 1000) + return f'{hours}:{minutes}:{seconds}.{microseconds}' + +def get_headcount(pid): + res = requests.get(config.urls['get_printinfo_url'], params={'id': pid}) + print('get_printsize_url:', res.url) + print('res:', res.text) + if res.status_code != 200: + print('获取人数失败,程序退出') + exit(1) + res = json.loads(res.text) + return res['data']['headcount'] + +def bmesh_copy_from_object(obj, transform=True, triangulate=True, apply_modifiers=False): + """Returns a transformed, triangulated copy of the mesh""" + assert obj.type == 'MESH' + if apply_modifiers and obj.modifiers: + import bpy + depsgraph = bpy.context.evaluated_depsgraph_get() + obj_eval = obj.evaluated_get(depsgraph) + me = obj_eval.to_mesh() + bm = bmesh.new() + bm.from_mesh(me) + obj_eval.to_mesh_clear() + else: + me = obj.data + if obj.mode == 'EDIT': + bm_orig = bmesh.from_edit_mesh(me) + bm = bm_orig.copy() + else: + bm = bmesh.new() + bm.from_mesh(me) + if transform: + matrix = obj.matrix_world.copy() + if not matrix.is_identity: + bm.transform(matrix) + matrix.translation.zero() + if not matrix.is_identity: + bm.normal_update() + if triangulate: + bmesh.ops.triangulate(bm, faces=bm.faces) + return bm + +def getPSid(pid): + get_psid_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/photoStudio' + res = requests.get(get_psid_url, params={'pid': pid}) + res = json.loads(res.text) + return str(res['data']) + +def getPSRotation(pid): + get_ps_rotation_url = 'https://mp.api.suwa3d.com/api/takephotoOrder/angle' + res = requests.get(get_ps_rotation_url, params={'pid': pid}) + res = json.loads(res.text) + rotation = (radians(0), radians(0), radians(int(res['data']))) + return rotation + +def main(): + start = time.time() + workdir = 'e:\\' + + if len(sys.argv) - (sys.argv.index("--") +1) < 1: + print("Usage: blender -b -P autofix.py -- ") + sys.exit(1) + input_file = sys.argv[sys.argv.index("--") + 1] + + for pid in input_file.split(','): + psid = getPSid(pid) + + bpy.ops.wm.read_homefile() + # bpy.context.scene.unit_settings.scale_length = 0.001 + bpy.context.scene.unit_settings.length_unit = 'CENTIMETERS' + bpy.context.scene.unit_settings.mass_unit = 'GRAMS' + bpy.ops.object.delete(use_global=False, confirm=False) + + filename = f'{workdir}{pid}\\output\{pid}.obj' + print('正在处理:', filename) + bpy.ops.import_scene.obj(filepath=filename) + bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_2', align_axis={'Z'}) + print('import obj time:', diff_minutes_and_seconds(start)) + + # rotate 
obj + obj = bpy.context.selected_objects[0] + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + rotation = getPSRotation(pid) + print('rotation:', rotation) + obj.rotation_euler = rotation + print('rotate obj time:', diff_minutes_and_seconds(start)) + # resize object + scale = 90 / obj.dimensions.z + obj.scale = (scale, scale, scale) + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_VOLUME', center='MEDIAN') + bpy.context.object.location[0] = 0 + bpy.context.object.location[1] = 0 + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + # bpy.ops.wm.save_as_mainfile(filepath=f'{workdir}{pid}\\output\{pid}_4.blend') + + bm = bmesh_copy_from_object(obj) + obj_volume = round(bm.calc_volume() / 1000, 3) + print('volume:', obj_volume) + print('weight:', obj_volume * 1.2, 'g') + + faces = len(obj.data.polygons) + print('faces:', faces) + + # save object + bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}\\output\{pid}.obj') + + # 生成数字模型 + headcount = get_headcount(pid) + faces_dest = 120000 * headcount + + # 减面 + faces_current = len(obj.data.polygons) + bpy.ops.object.modifier_add(type='DECIMATE') + bpy.context.object.modifiers["Decimate"].ratio = faces_dest / faces_current + bpy.ops.object.modifier_apply(modifier="Decimate") + + bpy.ops.export_scene.gltf(filepath=os.path.join(workdir, pid, 'output', f'{pid}_decimate.glb'), export_format='GLB', export_apply=True, export_jpeg_quality=75, export_draco_mesh_compression_enable=False) + + config.oss_bucket.put_object_from_file(f'glbs/3d/{pid}.glb', os.path.join(workdir, pid, 'output', f'{pid}_decimate.glb')) + print('免费体验3d相册已生成,上传glb文件:', f'glbs/3d/{pid}.glb 完成') + # render scene to a file + # bpy.context.scene.render.filepath = f'{workdir}{pid}_fixed.png' + # bpy.ops.render.render(write_still=True, use_viewport=True) + print('render time:', diff_minutes_and_seconds(start)) + + bpy.ops.wm.quit_blender() + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/blender/autofix10.py b/blender/autofix10.py new file mode 100644 index 0000000..932ea6d --- /dev/null +++ b/blender/autofix10.py @@ -0,0 +1,144 @@ +from math import radians +import sys, os, time, bpy, requests, json, bmesh + +def get_obj_version(filename): + with open(filename, 'r') as f: + for line in f: + if line.startswith('# Engine version'): + return float(line.split(' ')[-1][1:].strip()[:3]) + exit(0) + return None + +def delete_lines_in_file(filename, count): + with open(filename, 'r') as f: + lines = f.readlines() + lines = lines[count:] + with open(filename, 'w') as f: + f.writelines(lines) + +def diff_minutes_and_seconds(start): + hours = int((time.time() - start) / 3600) + minutes = int((time.time() - start) / 60) + seconds = int((time.time() - start) % 60) + microseconds = int(int((time.time() - start) * 1000000) % 1000000 / 1000) + return f'{hours}:{minutes}:{seconds}.{microseconds}' + +def getPSid(pid): + get_psid_url = 'https://mp.api.suwa3d.com/api/customerP3dLog/photoStudio' + res = requests.get(get_psid_url, params={'pid': pid}) + res = json.loads(res.text) + return str(res['data']) + +def bmesh_copy_from_object(obj, transform=True, triangulate=True, apply_modifiers=False): + """Returns a transformed, triangulated copy of the mesh""" + assert obj.type == 'MESH' + if apply_modifiers and obj.modifiers: + import bpy + depsgraph = bpy.context.evaluated_depsgraph_get() + obj_eval = obj.evaluated_get(depsgraph) + me = obj_eval.to_mesh() + bm = bmesh.new() + bm.from_mesh(me) + obj_eval.to_mesh_clear() + else: + me = 
obj.data + if obj.mode == 'EDIT': + bm_orig = bmesh.from_edit_mesh(me) + bm = bm_orig.copy() + else: + bm = bmesh.new() + bm.from_mesh(me) + if transform: + matrix = obj.matrix_world.copy() + if not matrix.is_identity: + bm.transform(matrix) + matrix.translation.zero() + if not matrix.is_identity: + bm.normal_update() + if triangulate: + bmesh.ops.triangulate(bm, faces=bm.faces) + return bm + + +def main(): + start = time.time() + config = { + '0': { + 'rotation': (radians(0), radians(0), radians(0)), + }, + '1': { + 'rotation': (radians(0), radians(0), radians(66)), + }, + '29': { + 'rotation': (radians(0), radians(0), radians(180)), + }, + '45': { + 'rotation': (radians(0), radians(0), radians(105)), + }, + '46': { + 'rotation': (radians(0), radians(0), radians(-10)), + }, + '74': { + 'rotation': (radians(0), radians(0), radians(110)), + }, + '75': { + 'rotation': (radians(0), radians(0), radians(210)), + }, + } + workdir = 'e:\\' + + if len(sys.argv) - (sys.argv.index("--") +1) < 1: + print("Usage: blender -b -P autofix.py -- ") + sys.exit(1) + input_file = sys.argv[sys.argv.index("--") + 1] + + for pid in input_file.split(','): + psid = getPSid(pid) + + bpy.ops.object.delete(use_global=False, confirm=False) + + filename = f'{workdir}{pid}\\output\{pid}.obj' + print('正在处理:', filename) + bpy.ops.import_scene.obj(filepath=filename) + bpy.ops.object.align(relative_to='OPT_1', align_axis={'X'}) + # bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + print('import obj time:', diff_minutes_and_seconds(start)) + + # rotate obj + obj = bpy.context.selected_objects[0] + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + + if psid in config: + obj.rotation_euler = config[psid]['rotation'] + else: + obj.rotation_euler = config['0']['rotation'] + print('rotate obj time:', diff_minutes_and_seconds(start)) + # bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + + # resize object + obj_scale = 90 / obj.dimensions.z + print('scale:', obj_scale) + + obj.scale = (obj_scale, obj_scale, obj_scale) + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + + bm = bmesh_copy_from_object(obj) + obj_volume = round(bm.calc_volume() / 1000, 3) + print('volume:', obj_volume) + print('weight:', obj_volume * 1.2, 'g') + + faces = len(obj.data.polygons) + print('faces:', faces) + + # save object + bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}\\output\{pid}.obj') + # render scene to a file + # bpy.context.scene.render.filepath = f'{workdir}{pid}_fixed.png' + # bpy.ops.render.render(write_still=True, use_viewport=True) + print('render time:', diff_minutes_and_seconds(start)) + + bpy.ops.wm.quit_blender() + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/blender/blender b/blender/blender new file mode 100755 index 0000000..58d4995 Binary files /dev/null and b/blender/blender differ diff --git a/blender/blender-launcher b/blender/blender-launcher new file mode 100755 index 0000000..521e2c8 --- /dev/null +++ b/blender/blender-launcher @@ -0,0 +1,12 @@ +#!/bin/sh +BF_DIST_BIN=$(dirname "$0") +BF_PROGRAM="blender" + +# Add own lib folder first, because Steam or other environments may set an +# LD_LIBRARY_PATH that has priority over the runpath in the Blender excutable, +# but contains incompatible libraries. 
+LD_LIBRARY_PATH=${BF_DIST_BIN}/lib:${LD_LIBRARY_PATH} + +export LD_LIBRARY_PATH + +exec "$BF_DIST_BIN/$BF_PROGRAM" ${1+"$@"} diff --git a/blender/blender-softwaregl b/blender/blender-softwaregl new file mode 100755 index 0000000..acd4dc3 --- /dev/null +++ b/blender/blender-softwaregl @@ -0,0 +1,22 @@ +#!/bin/sh +BF_DIST_BIN=$(dirname "$0") +BF_PROGRAM="blender" # BF_PROGRAM=$(basename "$0")-bin + +LD_LIBRARY_PATH=${BF_DIST_BIN}/lib/mesa:${LD_LIBRARY_PATH} + +if [ -n "$LD_LIBRARYN32_PATH" ]; then + LD_LIBRARYN32_PATH=${BF_DIST_BIN}/lib/mesa:${LD_LIBRARYN32_PATH} +fi +if [ -n "$LD_LIBRARYN64_PATH" ]; then + LD_LIBRARYN64_PATH=${BF_DIST_BIN}/lib/mesa:${LD_LIBRARYN64_PATH} +fi +if [ -n "$LD_LIBRARY_PATH_64" ]; then + LD_LIBRARY_PATH_64=${BF_DIST_BIN}/lib/mesa:${LD_LIBRARY_PATH_64} +fi + +# Workaround for half-transparent windows when compiz is enabled +XLIB_SKIP_ARGB_VISUALS=1 + +export LD_LIBRARY_PATH LD_LIBRARYN32_PATH LD_LIBRARYN64_PATH LD_LIBRARY_PATH_64 LD_PRELOAD XLIB_SKIP_ARGB_VISUALS + +exec "$BF_DIST_BIN/$BF_PROGRAM" ${1+"$@"} diff --git a/blender/blender-symbolic.svg b/blender/blender-symbolic.svg new file mode 100644 index 0000000..70d0a91 --- /dev/null +++ b/blender/blender-symbolic.svg @@ -0,0 +1,55 @@ + + + + + + image/svg+xml + + + + + + + + + + + diff --git a/blender/blender-thumbnailer b/blender/blender-thumbnailer new file mode 100755 index 0000000..9181df0 Binary files /dev/null and b/blender/blender-thumbnailer differ diff --git a/blender/blender.desktop b/blender/blender.desktop new file mode 100644 index 0000000..792f98b --- /dev/null +++ b/blender/blender.desktop @@ -0,0 +1,89 @@ +[Desktop Entry] +Name=Blender +GenericName=3D modeler +GenericName[ar]=3D المنمذج ثلاثي الأبعاد +GenericName[ca]=Modelador 3D +GenericName[cs]=3D modelování +GenericName[da]=3D-modellering +GenericName[de]=3D-Modellierer +GenericName[el]=Μοντελοποιητής 3D +GenericName[es]=Modelador 3D +GenericName[et]=3D modelleerija +GenericName[fi]=3D-mallintaja +GenericName[fr]=Modeleur 3D +GenericName[gl]=Modelador 3D +GenericName[hu]=3D modellező +GenericName[it]=Modellatore 3D +GenericName[ja]=3D モデラー +GenericName[lt]=3D modeliavimas +GenericName[nb]=3D-modellering +GenericName[nl]=3D-modeller +GenericName[pl]=Modelowanie 3D +GenericName[pt_BR]=Modelador 3D +GenericName[ro]=Arhitect 3D +GenericName[ru]=Редактор 3D-моделей +GenericName[tr]=3D modelleyici +GenericName[uk]=Редактор 3D-моделей +GenericName[wa]=Modeleu 3D +GenericName[zh_CN]=3D 建模 +GenericName[zh_TW]=3D 模型 +Comment=3D modeling, animation, rendering and post-production +Comment[ar]=3D النمذجة، الرسوم المتحركة، والتجسيد، وما بعد الإنتاج +Comment[ast]=Modeláu 3D, animación, renderizáu y postproducción +Comment[eu]=3D modelatzea, animazioa, errendatzea eta post-produkzioa +Comment[be]=Праграма прасторавага мадэлявання, анімацыі, апрацоўкі відэа і давядзення відэапрадукцыі +Comment[bn]=ত্রিমাত্রিক মডেল, অ্যানিমেশন, রেন্ডারিং এবং পোস্ট-উৎপাদন +Comment[bs]=3D modeliranje, animacija, obrada i postprodukcija +Comment[bg]=3D моделиране, анимиране, рендиране и пост-продукция +Comment[ca]=Modelat 3D, animació, renderització i post-producció +Comment[ca@valencia]=Modelat 3D, animació, renderització i post-producció +Comment[crh]=3B modelleme, animasyon, işleme ve son üretim +Comment[cs]=3D modelování, animace, rederování a postprodukce +Comment[da]=3D-modellering, animation, rendering og efterbehandling +Comment[de]=3D-Modellierung, Animation, Rendering und Nachbearbeitung +Comment[nl]=3d-modelleren, animeren, renderen en post-productie 
+Comment[el]=Μοντελοποίηση 3D, κινούμενα σχέδια, αποτύπωση και οργάνωση διαδικασίας μετά-την-παραγωγή +Comment[eo]=3D-modelado, animacio, renderado kaj postproduktado +Comment[es]=Modelado 3D, animación, renderizado y post-producción +Comment[et]=Kolmemõõtmeline modelleerimine, animeerimine, esitlemine ja järeltöötlemine +Comment[fi]=3D-mallinnus, -animaatiot, -renderöinti ja -tuotanto +Comment[fr]=Modélisation 3D, animation, rendu et post-production +Comment[fr_CA]=Modélisation 3D, animation, rendu et post-production +Comment[gl]=Modelado 3D, animación, renderizado e postprodución +Comment[hu]=3D modellek és animációk létrehozása és szerkesztése +Comment[is]=Þrívíddarmódel, hreyfimyndir, myndgerð og frágangur myndskeiða +Comment[it]=Modellazione 3D, animazione, rendering e post-produzione +Comment[ja]=3Dモデリング、アニメーション、レンダリング、ポストプロダクションのツール +Comment[ko]=3D 모델링, 애니메이션, 렌더링과 포스트 프로덕션 +Comment[lt]=3D modeliavimas, animacijų kūrimas, atvaizdavimas ir tobulinimas +Comment[lv]=3D modelēšana, animācija, renderēšana un pēcapstrāde +Comment[ms]=Pemodelan, animasi, penerapan dan post-produksi 3D +Comment[nb]=3D-modellering, animasjon, rendering og postproduksjon +Comment[oc]=Modelizacion 3D, animacion, rendut e post-produccion +Comment[pl]=Modelowanie 3D, animacja, renderowanie i postprodukcja +Comment[pt]=Modelação 3D, animação, renderização e pós-produção +Comment[pt_BR]=Modelagem 3D, animação, renderização e pós-produção +Comment[ro]=Modelare, animare, afișare și post-producție 3D +Comment[ru]=3D-моделирование, анимация, рендеринг и компоновка +Comment[sl]=3D modeliranje, animacija, izrisovanje in nadaljnje obdelovanje +Comment[sq]=Animacion i modeleve 3D, rregullim dhe më pas prodhim +Comment[sr]=3Д моделовање, анимација, исцртавање и постпродукција +Comment[sv]=3d-modellering, animering, rendering och efterbehandling +Comment[ta]=முப்பரிமாண ஒப்புருவாக்கம், அசைவூட்டம், காட்சியாக்கம் மற்றும் உருவாக்கத்துக்கு பிந்தைய செயல்பாடுகள் +Comment[tg]=Моделсозии 3D, аниматсия, пешниҳод ва истеҳсоли баъдӣ +Comment[tr]=3B modelleme, animasyon, işleme ve son üretim +Comment[uk]=Програма просторового моделювання, анімації, обробки відео та доведення відеопродуктів +Comment[vi]=Tạo hình mẫu 3D, hoạt họa, dựng hình và các công việc hậu kỳ +Comment[wa]=Modelaedje 3D, animåcion, rindou eyet après-produccion +Comment[zh_HK]=3D 模型、動畫、算圖和後製 +Comment[zh_CN]=3D 建模、动画、渲染和后期制作 +Comment[zh_TW]=3D 模型、動畫、算圖和後製 +Keywords=3d;cg;modeling;animation;painting;sculpting;texturing;video editing;video tracking;rendering;render engine;cycles;game engine;python; +Exec=blender %f +Icon=blender +Terminal=false +Type=Application +PrefersNonDefaultGPU=true +X-KDE-RunOnDiscreteGpu=true +Categories=Graphics;3DGraphics; +MimeType=application/x-blender; diff --git a/blender/blender.svg b/blender/blender.svg new file mode 100644 index 0000000..1fed4b9 --- /dev/null +++ b/blender/blender.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/blender/cal_foot_position.py b/blender/cal_foot_position.py new file mode 100644 index 0000000..3fb6b97 --- /dev/null +++ b/blender/cal_foot_position.py @@ -0,0 +1,678 @@ +import os, sys, bpy, math, time, platform, cairosvg, ppf.datamatrix, shutil, requests, json, redis, oss2, heapq +import matplotlib.pyplot as plt +from PIL import Image +import numpy as np +from addon_utils import enable +enable('io_import_images_as_planes') +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +sys.path.append('../libs\\') +import config + +def gen_data_matrix(print_id, qr_path, size = 300): + 
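+    # Encode the print id (prefixed with 'p') as a DataMatrix symbol via ppf.datamatrix, then rasterize
+    # the resulting SVG to a size x size PNG with a white background using cairosvg and write it to qr_path.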
svg = ppf.datamatrix.DataMatrix(f'p{print_id}').svg() + cairosvg.svg2png(bytestring=svg, write_to=qr_path, output_width=size, output_height=size, background_color='white') + +def active_object(obj): + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + +def down_obj_fromoss(pid, print_type=1, order_id=None): + # print_type:// 打印状态 1:正常打印 2:重打 3:加打,4: 样品 + print('开始下载obj文件...' , pid) + + if not order_id is None: + path = os.path.join(workdir, f'{pid}_{order_id}') + else: + path = os.path.join(workdir, pid) + if not os.path.exists(path): os.makedirs(path) + + # 根据前缀获取文件列表 + prefix = f'objs/print/{pid}/' + filelist = oss2.ObjectIteratorV2(config.oss_bucket, prefix=prefix) + find = False + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'{pid}.obj'): + find = True + localfile = os.path.join(path, filename) + res = config.oss_bucket.get_object_to_file(file.key, localfile) + print(f'下载文件:{file.key},状态:{res.status}') + + if not find: + for file in os.listdir(path): + if file.endswith('.obj'): + print('找到其他obj文件,采用这个文件来生成需要的尺寸', file) + shutil.copy(os.path.join(path, file), os.path.join(path, f'{pid}.obj')) + find = True + break + if not find: + print('找不到obj文件,异常退出') + sys.exit(1) + +def find_obj(pid, order_id): + find = False + if not os.path.exists(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.mtl')): + print('没有找到obj模型文件,开始下载') + down_obj_fromoss(pid, order_id=order_id) + if os.path.exists(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.jpg')): + shutil.move(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.jpg'), os.path.join(workdir, f'{pid}_{order_id}', f'{pid}Tex1.jpg')) + with open(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.mtl'), 'r') as f: + lines = f.readlines() + lines = [line.replace(f'map_Kd {pid}.jpg', f'map_Kd {pid}Tex1.jpg') for line in lines] + with open(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.mtl'), 'w') as f: + f.writelines(lines) + + filelist = os.listdir(os.path.join(workdir, f'{pid}_{order_id}')) + for filename in filelist: + if '9cm' in filename: + find = True + return filename + for filename in filelist: + if f'{pid}.obj' in filename: + find = True + return filename + for filename in filelist: + if '.obj' in filename: + find = True + return filename + print('没有找到obj模型文件') + return '' + +def find_pid_objname(pid): + for obj in bpy.data.objects: + if obj.name.startswith(str(pid)): + return obj.name + +def get_obj_max_foot(): + filename = find_obj(pid, order_id) + + filename = os.path.join(workdir, f'{pid}_{order_id}', filename) + bpy.ops.wm.read_homefile() + bpy.context.preferences.view.language = 'en_US' + bpy.ops.object.delete(use_global=False, confirm=False) + bpy.ops.import_scene.obj(filepath=filename) + bpy.context.scene.unit_settings.scale_length = 0.001 + bpy.context.scene.unit_settings.length_unit = 'CENTIMETERS' + bpy.context.scene.unit_settings.mass_unit = 'GRAMS' + + obj = bpy.context.selected_objects[0] + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + + pid_objname = find_pid_objname(pid) + + scale = 90 / obj.dimensions.y + obj.scale = (scale, scale, scale) + + bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_1', align_axis={'Z'}) + + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + obj.location[0] = 0 + obj.location[1] = 0 + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + + if pid in ('76461', '98871', '112139'): + bpy.ops.mesh.primitive_plane_add(size=200, 
enter_editmode=False, align='WORLD', location=(0, 0, 0.6), scale=(1, 1, 1)) + else: + bpy.ops.mesh.primitive_plane_add(size=200, enter_editmode=False, align='WORLD', location=(0, 0, 0.2), scale=(1, 1, 1)) + # bpy.ops.wm.save_as_mainfile(filepath=os.path.join(workdir, f'{pid}_{order_id}', f'{pid}_{order_id}.blend')) + bpy.ops.object.modifier_add(type='BOOLEAN') + bpy.context.object.modifiers["Boolean"].object = bpy.data.objects[pid_objname] + bpy.context.object.modifiers["Boolean"].operation = 'INTERSECT' + bpy.context.object.modifiers["Boolean"].solver = 'FAST' + bpy.ops.object.modifier_apply(modifier="Boolean") + + bpy.ops.mesh.separate(type='LOOSE') + + max_area = 0 + for obj in bpy.data.objects: + if obj.type == 'MESH' and obj.name.startswith('Plane'): + if len(obj.data.polygons) == 0: continue + area = obj.data.polygons[0].area + if area > max_area: + max_area = area + obj.name = 'foot' + print(f'最大脚底板面积: {max_area} cm²') + if max_area < 5: + print('最大脚底板面积太小,脚底模型可能有破损,进行再次处理') + numsTemp = 0 + #最多执行三次 重新处理 + while numsTemp < 3: + #每次削的比例要加上去 + tempArea = 0.2+numsTemp*0.2 + max_area = check_and_deal_foot_area(tempArea,pid_objname) + if max_area >= 5: + break + numsTemp += 1 + + if max_area < 8: + print('最大脚底板面积处理多次还没有得到理想的面积,退出') + #移除该脚底板的面积处理 + res = requests.get(f'https://mp.api.suwa3d.com/api/footCode/deleteByPid?pid={pid}') + os.system(f'blender -b -P fill_dm_code.py') + return + # bpy.ops.wm.save_as_mainfile(filepath=os.path.join(workdir, f'{pid}_{order_id}', f'{pid}_{order_id}.blend')) + active_object(bpy.data.objects['foot']) + foot_points = get_plane_points(bpy.data.objects['foot']) + # plot(get_plane_points(bpy.data.objects['foot']), 'blue') + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + + # print(f"location: {bpy.data.objects['foot'].location}") + bpy.ops.import_image.to_plane(files=[{"name":"qr.png"}], directory=f"{workdir}{pid}_{order_id}", relative=False) + # bpy.ops.mesh.primitive_plane_add(size=1, enter_editmode=False, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) + + # print(f"new_location: {bpy.data.objects['foot'].location}") + + active_object(bpy.data.objects['qr']) + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + bpy.data.objects['qr'].rotation_euler[0] = 0 + bpy.data.objects['qr'].location = bpy.data.objects['foot'].location + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + # bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + # print(f"qr_location: {bpy.data.objects['qr'].location}") + # print(f'qr_points: {get_plane_points(bpy.data.objects["qr"])}') + # plot(get_plane_points(bpy.data.objects['qr']), 'red') + return foot_points + +#检测到脚底板面积小于5cm²,重新调几次程序再次处理,如果还不行就退出 +def check_and_deal_foot_area(tempArea,pid_objname): + bpy.ops.mesh.primitive_plane_add(size=200, enter_editmode=False, align='WORLD', location=(0, 0, tempArea), scale=(1, 1, 1)) + bpy.ops.object.modifier_add(type='BOOLEAN') + bpy.context.object.modifiers["Boolean"].object = bpy.data.objects[pid_objname] + bpy.context.object.modifiers["Boolean"].operation = 'INTERSECT' + bpy.context.object.modifiers["Boolean"].solver = 'FAST' + bpy.ops.object.modifier_apply(modifier="Boolean") + + bpy.ops.mesh.separate(type='LOOSE') + + max_area = 0 + for obj in bpy.data.objects: + if obj.type == 'MESH' and obj.name.startswith('Plane'): + if len(obj.data.polygons) == 0: continue + area = obj.data.polygons[0].area + if area > max_area: + max_area = area + obj.name = 'foot' + print("再次处理脚底板得到的面积:" + 
str(max_area) + "cm²") + return max_area + + +def euclidean_distance(p1, p2): + return ((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2) ** 0.5 + +def nearest_neighbor_sort(points): + print('nearest neighbor sort') + n = len(points) + visited = set() + sorted_points = [] + i = 1 + # Start from the first point + current_point = points[0] + + #while len(visited) < n: + while i < n: + i += 1 + sorted_points.append(current_point) + visited.add(current_point) + + # Create a priority queue to store distances and points + distance_queue = [] + for point in points: + if point not in visited: + distance = euclidean_distance(current_point, point) + heapq.heappush(distance_queue, (distance, point)) + + # Find the nearest unvisited point + while distance_queue: + distance, next_point = heapq.heappop(distance_queue) + if next_point not in visited: + current_point = next_point + break + + + return sorted_points + +def get_max_qr(foot_points): + + def dis_flag(square, foot_points): + for point in foot_points: + dis0 = get_distance_from_point_to_line(point, square[0], square[1]) + dis1 = get_distance_from_point_to_line(point, square[1], square[2]) + dis2 = get_distance_from_point_to_line(point, square[2], square[3]) + dis3 = get_distance_from_point_to_line(point, square[3], square[0]) + min_dis = min([dis0, dis1, dis2, dis3]) + return min_dis > 0.5 + + def get_distance_from_point_to_line(point, line_point1, line_point2): + # 对于两点坐标为同一点时,返回点与点的距离 + if line_point1 == line_point2: + point_array = np.array(point) + point1_array = np.array(line_point1) + return np.linalg.norm(point_array - point1_array) + # 计算直线的三个参数 + A = line_point2[1] - line_point1[1] + B = line_point1[0] - line_point2[0] + C = (line_point1[1] - line_point2[1]) * line_point1[0] + \ + (line_point2[0] - line_point1[0]) * line_point1[1] + # 根据点到直线的距离公式计算距离 + distance = np.abs(A * point[0] + B * point[1] + C) / (np.sqrt(A ** 2 + B ** 2)) + return distance + + # 判断方形是否在轮廓内 + def square_in_polygon_default(square, polygon): + for point in square: + if not point_in_polygon(point, polygon): + return False + return True + + # 自定义二维码初始坐标 + def get_default_qr_points(foot_points): + max_x = max([x[0] for x in foot_points]) + min_x = min([x[0] for x in foot_points]) + max_y = max([x[1] for x in foot_points]) + min_y = min([x[1] for x in foot_points]) + + center_x, center_y = (max_x + min_x) / 2, (max_y + min_y) / 2 + flag_default = point_in_polygon((center_x, center_y), foot_points) + if not flag_default: + index_move = 0 + while not flag_default and index_move < 5: + center_x = (center_x + min_x) / 2 + index_move += 1 + flag_default = point_in_polygon((center_x, center_y), foot_points) + if not flag_default: + while not flag_default: + center_y = (center_y + min_y) / 2 + flag_default = point_in_polygon((center_x, center_y), foot_points) + + length = min((center_x - min_x) / 2, (center_y - min_y) / 2) / 2 + # 在不规则平面中心位置初始化一个方形 + qr_points = [(center_x - length, center_y + length), (center_x + length, center_y + length), (center_x + length, center_y - length), (center_x - length, center_y - length)] + qr_points = scale_qr_new(foot_points, qr_points, length, (center_x, center_y), scale=1.05) + return qr_points + + def scale_qr_new(foot_points, qr_points, length, center, scale=1.1): + default_flag = flag = square_in_polygon(qr_points, foot_points) + center_x, center_y = center[0], center[1] + if flag: + while default_flag == flag: + length *= scale + # 对每个点进行放大操作并更新坐标 + qr_points = [((x - center_x) * scale + center_x, (y - center_y) * scale + center_y) for x, y 
in qr_points] + flag = square_in_polygon_default(qr_points, foot_points) and square_in_polygon(qr_points, foot_points) + else: + while default_flag == flag: + length /= scale + # 对每个点进行缩小操作并更新坐标 + qr_points = [((x - center_x) / scale + center_x, (y - center_y) / scale + center_y) for x, y in qr_points] + flag = square_in_polygon_default(qr_points, foot_points) and square_in_polygon(qr_points, foot_points) + return qr_points + + # 获取旋转后方形 根据方形原坐标旋转 + def cal_rota_points(qr_points, center, angle): + center_x, center_y = center[0], center[1] + if angle > 0: + qr_points_after_rotate = [] + for point in qr_points: + new_x = (point[0] - center_x) * math.cos(angle) - (point[1] - center_y) * math.sin(angle) + center_x + new_y = (point[0] - center_x) * math.sin(angle) + (point[1] - center_y) * math.cos(angle) + center_y + qr_points_after_rotate.append((new_x, new_y)) + return qr_points_after_rotate + else: + return qr_points + + # 取中点 + def cal_middle_point(p1, p2): + x1, y1 = p1 + x2, y2 = p2 + # 中点 + a1 = (x1 + x2) / 2 + b1 = (y1 + y2) / 2 + return a1, b1 + + def make_points(qr_points): + new_points = [] + index = [0, 1, 2, 3, 0] + for i in range(4): + a, b = cal_middle_point(qr_points[index[i]], qr_points[index[i + 1]]) + new_points.append((a, b)) + new_points.append((cal_middle_point(qr_points[index[i]], (a, b)))) + new_points.append((cal_middle_point(qr_points[index[i + 1]], (a, b)))) + return new_points + + #qr_points = get_default_qr_points(foot_points) + + min_qr_length = 0.5 + + minx = min([p[0] for p in foot_points]) + min_qr_length + maxx = max([p[0] for p in foot_points]) - min_qr_length + miny = min([p[1] for p in foot_points]) + min_qr_length + maxy = max([p[1] for p in foot_points]) - min_qr_length + + def rotate_qr_v3(foot_points, qr_points, scale, angle=1): + best_length = length = cal_square_length(qr_points) + best_angle, default_angle = 0, 0 + center_x, center_y = calculate_center(qr_points) + best_qr_points = qr_points + # 循环1 求最佳angle 不断增大angle角度 + while default_angle <= 90: + qr_points_after_rotate = cal_rota_points(qr_points, (center_x, center_y), default_angle) + # 在当前angle下增加边长 + while square_in_polygon(qr_points_after_rotate, foot_points) and dis_flag(qr_points_after_rotate, foot_points): + flag = True + best_qr_points = qr_points_after_rotate + best_angle = default_angle + best_length = length + # 对每个点进行放大(或缩小)操作并更新坐标 + qr_points = [((x - center_x) * scale + center_x, (y - center_y) * scale + center_y) for x, y in qr_points] + length *= scale + qr_points_after_rotate = cal_rota_points(qr_points, (center_x, center_y), default_angle) + # 限制最大边长 + if best_length > 5: + return best_qr_points, best_angle, best_length + + default_angle += angle + return best_qr_points, best_angle, best_length + + if maxx - minx < maxy - miny: + step = (maxx - minx) / 15 + else: + step = (maxy - miny) / 15 + + x, y = minx, miny + locations = [] + + while x <= maxx: + while y <= maxy: + locations.append((x, y)) + y += step + x += step + y = miny + + # print(f'locations: {locations}') + locations = [point for point in locations if all(cal_distance(point, f) >= min_qr_length for f in foot_points)] + location = locations[0] + qr_points = [(location[0] - 0.5, location[1] - 0.5), (location[0] + 0.5, location[1] - 0.5), (location[0] + 0.5, location[1] + 0.5), (location[0] - 0.5, location[1] + 0.5)] + plot(foot_points) + plot(qr_points, 'yellow') + plt.savefig(f'{workdir}{pid}_{order_id}/fig.png') + + best_qr, max_qr_length, best_location, best_rotation = None, 0, None, 0 + for location in locations: + 
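+        # Candidate search (descriptive note): move the unit square to this location, then grow and rotate it; keep the largest square that stays inside the foot outline.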
plt.plot(location[0], location[1], 'ro') + qr_points = move_square(qr_points, location) + if not square_in_polygon(qr_points, foot_points) or not square_in_polygon_default(qr_points, foot_points): + continue + else: + # qr_points = scale_qr(foot_points, qr_points, 1.1) + # qrs.append(qr_points) + rotate_qr, rotate_angle, qr_length = rotate_qr_v3(foot_points, qr_points, 1.1, 1) + if qr_length > max_qr_length: + max_qr_length = qr_length + best_location = location + best_rotation = rotate_angle + best_qr = rotate_qr + + rd = max_qr_length / 1.1 / 2 + x, y = best_location[0], best_location[1] + new_qr_points = [(x - rd, y + rd), (x + rd, y + rd), (x + rd, y - rd), (x - rd, y - rd)] + new_qr_points = cal_rota_points(new_qr_points, best_location, best_rotation) + return new_qr_points, best_location, max_qr_length / 1.1, best_rotation + +def get_plane_points(plane, print_points = False): + points = [] + for edge in plane.data.edges: + point_index = edge.vertices[0] + point3d = plane.data.vertices[point_index].co + if print_points: print(point3d) + points.append((point3d[0], point3d[1])) + return points + +def point_in_polygon(point, polygon): + num_intersections = 0 + for i in range(len(polygon)): + p1, p2 = polygon[i], polygon[(i + 1) % len(polygon)] + if (p1[1] > point[1]) != (p2[1] > point[1]): + if point[0] < (p2[0] - p1[0]) * (point[1] - p1[1]) / (p2[1] - p1[1]) + p1[0]: + num_intersections += 1 + return num_intersections % 2 == 1 + +def square_iou_polygon(square, polygon): + for point in square: + if point_in_polygon(point, polygon): + return True + return False + +def square_in_polygon(square, polygon): + for point in polygon: + if point_in_polygon(point, square): + return False + return True + +def plot(points, color='blue'): + x = [point[0] for point in points] + y = [point[1] for point in points] + if points[-1] != points[0]: + x.append(points[0][0]) + y.append(points[0][1]) + plt.plot(x, y, color=color) + +def scale_qr(foot_points, qr_points, scale = 1.1): + while True: + old_points = qr_points + # 计算正方形的中心坐标 + center_x = sum(x for x, y in qr_points) / len(qr_points) + center_y = sum(y for x, y in qr_points) / len(qr_points) + + # 对每个点进行放大(或缩小)操作并更新坐标 + qr_points = [((x - center_x) * scale + center_x, (y - center_y) * scale + center_y) for x, y in qr_points] + + if not square_in_polygon(qr_points, foot_points): + qr_points = old_points + break + return qr_points + +def rotate_qr(foot_points, qr_points, angle = 0.1): + while True: + old_points = qr_points + # 计算正方形的中心坐标 + center_x = sum(x for x, y in qr_points) / len(qr_points) + center_y = sum(y for x, y in qr_points) / len(qr_points) + + # 对每个点进行放大(或缩小)操作并更新坐标 + qr_points = [(x - center_x, y - center_y) for x, y in qr_points] + + qr_points = [(x * math.cos(angle) - y * math.sin(angle), x * math.sin(angle) + y * math.cos(angle)) for x, y in qr_points] + + qr_points = [(x + center_x, y + center_y) for x, y in qr_points] + + if not square_in_polygon(qr_points, foot_points): + qr_points = old_points + break + return qr_points + +def scale_square(scale, foot_points, back = 0.0): + while True: + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + old_dimensions = bpy.data.objects['qr'].dimensions.copy() + active_object(bpy.data.objects['qr']) + bpy.data.objects['qr'].scale = (scale, scale, 1) + max_square = get_plane_points(bpy.data.objects['qr']) + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + if not square_in_polygon(max_square, foot_points): + 
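+            # Scaled square no longer fits inside the outline: fall back to the previous dimensions minus `back`, record its centre and size, and stop.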
bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + bpy.data.objects['qr'].dimensions = (old_dimensions[0] - back, old_dimensions[1] - back, 0) + max_square = get_plane_points(bpy.data.objects['qr']) + location, size = get_square_center_size() + break + return max_square, location, size + +def zoom_square(foot_points, qr_points, center, step_length=0.1): + while True: + old_dimensions = bpy.data.objects['qr'].dimensions.copy() + active_object(bpy.data.objects['qr']) + # print(f'old_dimensions: {old_dimensions}') + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + bpy.data.objects['qr'].dimensions = (old_dimensions[0] + step_length, old_dimensions[1] + step_length, 0) + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + # print(f'new_dimensions: {bpy.data.objects["qr"].dimensions}') + max_square = get_plane_points(bpy.data.objects['qr']) + if not square_in_polygon(max_square, foot_points): + bpy.data.objects['qr'].dimensions = (old_dimensions[0], old_dimensions[1], 0) + max_square = get_plane_points(bpy.data.objects['qr']) + location, size, length = get_square_center_size() + break + return max_square, location, size, length + +def get_square_center_size(): + active_object(bpy.data.objects['qr']) + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + location = bpy.data.objects['qr'].location + size = bpy.data.objects['qr'].dimensions + length = size[0] + return location, size, length + +def min_x(plane): + return min([p[0] for p in plane]) +def min_y(plane): + return min([p[1] for p in plane]) +def max_x(plane): + return max([p[0] for p in plane]) +def max_y(plane): + return max([p[1] for p in plane]) + +def cal_square_length(square): + return abs(square[0][0] - square[1][0]) + +def cal_square_area(square): + return abs(square[0][0] - square[1][0]) * abs(square[0][1] - square[3][1]) + +def cal_distance(point1, point2): + return math.sqrt((point1[0] - point2[0])**2 + (point1[1] - point2[1])**2) + +def calculate_center(vertices): + x_sum = sum(x for x, y in vertices) + y_sum = sum(y for x, y in vertices) + center_x = x_sum / len(vertices) + center_y = y_sum / len(vertices) + return center_x, center_y + +def move_square(vertices, new_center): + center_x, center_y = calculate_center(vertices) + # print(f'center_x: {center_x}, center_y: {center_y}') + # print(f'new_center: {new_center}') + x_diff = center_x - new_center[0] + y_diff = center_y - new_center[1] + # print(f'x_diff: {x_diff}, y_diff: {y_diff}') + # print(f'vertices: {vertices}') + new_vertices = [(x - x_diff, y - y_diff) for x, y in vertices] + # print(f'new_vertices: {new_vertices}') + return new_vertices + +def main(workdir, pid, order_id, print_id): + if not os.path.exists(os.path.join(workdir, f'{pid}_{order_id}')): + os.makedirs(os.path.join(workdir, f'{pid}_{order_id}')) + qr_path = os.path.join(workdir, f'{pid}_{order_id}' ,'qr.png') + gen_data_matrix(print_id, qr_path) + try: + get_obj_max_foot() + except Exception as e: + print(f"get obj max foot err {e}") + res = requests.get(f'https://mp.api.suwa3d.com/api/footCode/deleteByPid?pid={pid}') + os.system(f'blender -b -P fill_dm_code.py') + return + + qr_points = get_plane_points(bpy.data.objects['qr']) + active_object(bpy.data.objects['foot']) + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + foot_points = get_plane_points(bpy.data.objects['foot']) + # print('foot_points:', 
foot_points) + foot_points = nearest_neighbor_sort(foot_points) + try: + max_qr, qr_location, max_qr_length, rotation = get_max_qr(foot_points) + except Exception as e: + print("异常处理错误") + res = requests.get(f'https://mp.api.suwa3d.com/api/footCode/deleteByPid?pid={pid}') + os.system(f'blender -b -P fill_dm_code.py') + return + print(f'qr_location: {qr_location}') + plt.plot(qr_location[0], qr_location[1], 'black') + plot(max_qr, 'green') + plt.axis('equal') + plt.savefig(os.path.join(workdir, f'{pid}_{order_id}', 'fig.png')) + + bpy.ops.wm.save_as_mainfile(filepath=f'{workdir}{pid}_{order_id}/{pid}_qr_start.blend') + + qr_position = {} + qr_location = (qr_location[0], qr_location[1], 0) + qr_dimensions = (max_qr_length, max_qr_length, 0) + # print(f'qr_location: {qr_location}') + # print(f'qr_dimensions: {qr_dimensions}') + qr_position["location"] = qr_location + qr_position["dimensions"] = qr_dimensions + qr_position["rotation"] = rotation + print(f'qr_position: {qr_position}') + # with open(os.path.join(workdir, f'{pid}_{order_id}', 'qr_position.txt'), 'w') as f: + # f.write(json.dumps(qr_position)) + + res = requests.get(f'{upload_qr_position_url}?print_id={print_id}&position_data={json.dumps(qr_position)}') + print(f'update_qr_position_url {upload_qr_position_url}:{res.text}') + + bpy.ops.object.load_reference_image(filepath=os.path.join(workdir, f'{pid}_{order_id}', 'qr.png')) + bpy.context.object.rotation_euler = (math.radians(-180), math.radians(0), rotation) + bpy.ops.transform.translate(value=qr_location, orient_type='GLOBAL', orient_matrix=((1, 0, 0), (0, 1, 0), (0, 0, 1)), orient_matrix_type='GLOBAL', mirror=False, use_proportional_edit=False, proportional_edit_falloff='SMOOTH', proportional_size=1, use_proportional_connected=False, use_proportional_projected=False, snap=False, snap_elements={'INCREMENT'}, use_snap_project=False, snap_target='CLOSEST', use_snap_self=True, use_snap_edit=True, use_snap_nonedit=True, use_snap_selectable=False, release_confirm=True) + + bpy.context.object.empty_display_size = qr_dimensions[0] + + # for obj in bpy.data.objects: + # if obj.type == 'MESH' and obj.name != pid: + # bpy.data.objects.remove(obj) + + # qr_path = os.path.join(workdir,f'{pid}_{order_id}', f"{pid}_{order_id}Tex1_qr.png") + # jpg_path = os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1.jpg") + # jpg_img = Image.open(jpg_path) + # shutil.copyfile(jpg_path, os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1_noqr.jpg")) + + # bpy.context.scene.eyek.res_x = jpg_img.width + # bpy.context.scene.eyek.res_y = jpg_img.height + # bpy.context.scene.eyek.path_export_image = qr_path + # bpy.data.objects[f'{pid}'].select_set(True) + # bpy.data.objects['Empty'].select_set(True) + # bpy.context.view_layer.objects.active = bpy.data.objects[f'{pid}'] + # bpy.ops.eyek.exe() + + # qr_img = Image.open(qr_path) + # jpg_img.paste(qr_img, (0, 0), qr_img) + # jpg_img.save(jpg_path) + + # plt.axis('equal') + # plt.show() + + # 保存blend文件 + bpy.ops.wm.save_as_mainfile(filepath=f'{workdir}{pid}_{order_id}/{pid}_qr_end.blend') + bpy.ops.wm.quit_blender() + + +if __name__ == '__main__': + get_qr_position_url = 'https://mp.api.suwa3d.com/api/printOrder/getFootCodePositionData' + upload_qr_position_url = 'https://mp.api.suwa3d.com/api/printOrder/updateFootCodeStatus' + get_pid_by_printid_url = 'https://mp.api.suwa3d.com/api/printOrder/getPidByPrintId' + delete_form_foot_code_by_pid = 'https://mp.api.suwa3d.com/api/printOrder/deleteFormFootCodeByPid' + # get_qr_position_url = 
'http://172.31.1.254:8199/api/printOrder/getFootCodePositionData' + # upload_qr_position_url = 'http://172.31.1.254:8199/api/printOrder/updateFootCodeStatus' + # get_pid_by_printid_url = 'http://172.31.1.254:8199/api/printOrder/getPidByPrintId' + + if platform.system() == 'Windows': + workdir = 'E:\\print\\foot\\' + else: + workdir = '/data/datasets/foot/' + + print(sys.argv) + if len(sys.argv) - (sys.argv.index("--") + 1) < 1: + print("Usage: blender -b -P auto_dm.py -- ") + sys.exit(1) + pid, order_id, print_id = sys.argv[sys.argv.index("--") + 1].split('_') + + main(workdir, pid, order_id, print_id) \ No newline at end of file diff --git a/blender/copyright.txt b/blender/copyright.txt new file mode 100644 index 0000000..062ab0c --- /dev/null +++ b/blender/copyright.txt @@ -0,0 +1,39 @@ + + This version of Blender has been originally released at www.blender.org. + It is subject to the GNU GPL license, which is part of this download. + + Blender, the free and open source 3D creation suite + Copyright (C) 2023 Blender Foundation + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + + The Blender project uses code and libraries which are licensed under terms of + licenses which are compatible with the GNU GPL 3 or later. + + The license of each individual Blender source file can be found in the first + few lines after "SPDX-License-Identifier:". The license text of every such + license can be found in license/. + + The complete overview of copyright and licenses of third-party libraries used + by Blender can be found in THIRD-PARTY-LICENSES.txt. 
+
+ Stichting Blender Foundation
+ Amsterdam
+ the Netherlands
+ Chamber of Commerce: 34176425
+
+ http://www.blender.org
+ foundation@blender.org
+
diff --git a/blender/debug.Text.py b/blender/debug.Text.py
new file mode 100644
index 0000000..4676f27
--- /dev/null
+++ b/blender/debug.Text.py
@@ -0,0 +1,91 @@
+import bpy, sys, os, math
+
+pid = '26385'
+workdir = '/home/water/Downloads/'
+
+filename = f'{workdir}{pid}/{pid}_9cm_x1.obj'
+bpy.ops.import_scene.obj(filepath=filename)
+# Reset the imported object's orientation and origin
+obj = bpy.context.selected_objects[0]
+obj.rotation_euler[0] = 0
+bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)
+bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_VOLUME', center='MEDIAN')
+bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_1', align_axis={'Y', 'Z'})
+bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)
+# bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}_align_yz.obj')
+
+# Lay the model flat, at the position and angle required for printer layout
+obj.rotation_euler = (math.radians(90), math.radians(90), 0)
+bpy.ops.object.transform_apply(rotation=True)
+# bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}_rotate_y90.obj')
+
+heights = {}
+min_height = 999999
+min_i = 0
+max_height = -999999
+max_i = 0
+
+bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_VOLUME', center='MEDIAN')
+bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_3', align_axis={'X', 'Y', 'Z'})
+
+# Rotate around the X axis in 2-degree steps up to 180 degrees, recording the lowest and highest Y extents; the lowest orientation is used for printing
+step = 2
+i = 0
+while i <= 180:
+    obj.rotation_euler = (math.radians(step), 0, 0)
+    bpy.ops.object.transform_apply(rotation=True)
+    if obj.dimensions[1] < min_height:
+        min_height = obj.dimensions[1]
+        min_i = i
+    if obj.dimensions[1] > max_height:
+        max_height = obj.dimensions[1]
+        max_i = i
+    heights[i] = (obj.dimensions[0], obj.dimensions[1], obj.dimensions[2])
+    print(i, heights[i])
+    i += step
+
+obj.rotation_euler = (0, 0, 0)
+bpy.ops.object.transform_apply(rotation=True)
+obj.rotation_euler = (math.radians(min_i), 0, 0)
+bpy.ops.object.transform_apply(rotation=True)
+#bpy.ops.export_scene.obj(filepath=f'{workdir}{pid}_miny.obj')
+print(f'Min height: {min_height} @ {heights[min_i]} min_i:{min_i}', f'Max height: {max_height} @ {heights[max_i]} max_i:{max_i}')
+
+offset = 45.5
+radian = math.radians(90)
+bpy.ops.mesh.primitive_plane_add(size=200, enter_editmode=False, align='WORLD', location=(offset, 0, 0), rotation=(0, radian, 0), scale=(1, 1, 1))
+
+# Boolean cut, keeping only the intersecting cross-section
+bpy.ops.object.modifier_add(type='BOOLEAN')
+bpy.context.object.modifiers["Boolean"].object = bpy.data.objects[pid]
+bpy.context.object.modifiers["Boolean"].operation = 'INTERSECT'
+bpy.context.object.modifiers["Boolean"].solver = 'FAST'
+bpy.ops.object.modifier_apply(modifier="Boolean")
+
+# Split the cut face into separate polygons, then iterate over them to find the one with the largest area
+bpy.ops.mesh.separate(type='LOOSE')
+
+max_area = 0
+max_obj = None
+for obj in bpy.data.objects:
+    if obj.type == 'MESH' and obj.name.startswith('Plane'):
+        area = obj.data.polygons[0].area
+        if area > max_area:
+            max_area = area
+            max_obj = obj
+
+# Select the largest polygon and compute its centre point
+bpy.ops.object.select_all(action='DESELECT')
+max_obj.select_set(True)
+bpy.context.view_layer.objects.active = max_obj
+bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY')
+
+bpy.ops.import_scene.obj(filepath=f'{workdir}{pid}/qrcode.obj')
+qr_obj = bpy.data.objects['qrcode']
+shore_obj = bpy.data.objects['Cube.001']
+qr_obj.location = (max_obj.location[0] - qr_obj.dimensions[0] / 2 - shore_obj.dimensions[0], max_obj.location[1], max_obj.location[2])
+shore_obj.location = (qr_obj.location[0]-0.01, max_obj.location[1], max_obj.location[2])
+
+for obj
in bpy.data.objects: + if obj.type == 'MESH' and obj.name.startswith('Plane'): + bpy.data.objects.remove(obj) \ No newline at end of file diff --git a/blender/eyek_cache/1711953677284273800/cameras.json b/blender/eyek_cache/1711953677284273800/cameras.json new file mode 100644 index 0000000..38fd989 --- /dev/null +++ b/blender/eyek_cache/1711953677284273800/cameras.json @@ -0,0 +1 @@ +{"data": [{"location": {"x": 7.40227746963501, "y": -0.10000000149011612, "z": 4.669338226318359}, "rotation_euler": {"x": 1.570796251296997, "y": 3.0, "z": -0.0}, "scale": {"x": 4.177248001098633, "y": 4.177248001098633, "z": 4.177248001098633}, "fov_x": -4.177248001098633, "limit_near": 0.009999999776482582, "limit_far": 200.0, "image_path": "E:\\print\\foot\\152696_56145\\qr.png"}]} \ No newline at end of file diff --git a/blender/fill_dm_code.py b/blender/fill_dm_code.py new file mode 100644 index 0000000..9b443a3 --- /dev/null +++ b/blender/fill_dm_code.py @@ -0,0 +1,715 @@ +import os, sys, bpy, math, time, platform, cairosvg, ppf.datamatrix, shutil, requests, json, redis, oss2, cv2,qrcode +from retrying import retry +import subprocess +import random +import numpy as np +import matplotlib.pyplot as plt +from PIL import Image, ImageEnhance +from addon_utils import enable +import logging,atexit,platform +# if platform.system() == 'Windows': +sys.path.append('/home/acprint/code/libs/') +import common +logging.basicConfig(filename='foot_update_res.log', level=logging.ERROR) +enable('io_import_images_as_planes') +enable('eyek_addon') + + +#查询影棚ID +def getPSid(pid): + res = requests.get("https://mp.api.suwa3d.com/api/customerP3dLog/photoStudio",params={"pid":pid}) + res = json.loads(res.text) + return str(res['data']) + + +def restart_current_process(new_command): + try: + # 保存新进程的命令 + command = new_command.split() + + # 启动新进程 + new_process = subprocess.Popen(command) + + # 打印新进程的PID + print(f"New process started with PID: {new_process.pid}") + + # 终止当前进程 + os._exit(0) + + except Exception as e: + print(f"An error occurred: {e}") + +#生成二维码图片 +def gen_data_matrix(print_id,pid, qr_path, size = 300): + psid = getPSid(pid) + if int(psid) == 0: + print("费工夫构建脚底二维码") + # if use_foot_type == "short_url": + #调用接口获取短网址信息 + short_url = get_short_url(print_id) + if short_url == False: + return + temp_foot_data = short_url + + #生成二维码 + qr = qrcode.QRCode( + version=1, + error_correction = qrcode.constants.ERROR_CORRECT_L, + box_size=10, + border=2, + ) + qr.add_data(temp_foot_data) + qr.make(fit=True) + img = qr.make_image(fill_color="black",back_color="white").resize((size,size)) + img.save(qr_path) + else: + #正常生成不是常规的二维码 + print("排除费工夫正常构建二维码") + svg = ppf.datamatrix.DataMatrix(f'p{print_id}').svg() + cairosvg.svg2png(bytestring=svg, write_to=qr_path, output_width=size, output_height=size, background_color='white') + +def active_object(obj): + bpy.context.view_layer.objects.active = obj + obj.select_set(True) +#下载oss的文件 +@retry(stop_max_attempt_number=10, wait_fixed=2000) +def down_obj_fromoss(pid, print_type=1, order_id=None): + # print_type:// 打印状态 1:正常打印 2:重打 3:加打,4: 样品 + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} pid: {pid} 开始下载模型,若网络异常将每间隔2秒重试10次...') + if not order_id is None: + path = os.path.join(workdir, f'{pid}_{order_id}') + else: + path = os.path.join(workdir, pid) + if os.path.exists(path): + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} pid: {pid} 已存在模型文件,删除后重新下载') + shutil.rmtree(path, ignore_errors=True) + os.makedirs(path) + + # 
下载分2种情况,一种是第一次打印,下载标准{pid}.obj,{pid}.mtl,{pid}Tex1.jpg,另一种是重打或加打,obj文件名可以从oss上任意获取一个,但是mtl和jpg文件名是固定的 + res = oss_client.get_object_to_file(f'objs/print/{pid}/{pid}.mtl', os.path.join(path, f'{pid}.mtl')) + last_modified = oss_client.get_object_meta(f"objs/print/{pid}/{pid}.mtl").last_modified + print(f'mtl文件最后修改时间:{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(last_modified))}') + print(f'下载文件:objs/print/{pid}/{pid}.mtl,状态:{res.status}') + if oss_client.object_exists(f'objs/print/{pid}/{pid}Tex1.jpg'): + res = oss_client.get_object_to_file(f'objs/print/{pid}/{pid}Tex1.jpg', os.path.join(path, f'{pid}Tex1.jpg')) + last_modified = oss_client.get_object_meta(f"objs/print/{pid}/{pid}Tex1.jpg").last_modified + print(f'jpg文件最后修改时间:{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(last_modified))}') + print(f'下载文件:objs/print/{pid}/{pid}Tex1.jpg,状态:{res.status}') + else: + res = oss_client.get_object_to_file(f'objs/print/{pid}/{pid}.jpg', os.path.join(path, f'{pid}Tex1.jpg')) + last_modified = oss_client.get_object_meta(f"objs/print/{pid}/{pid}.jpg").last_modified + print(f'jpg文件最后修改时间:{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(last_modified))}') + print(f'下载文件:objs/print/{pid}/{pid}.jpg,状态:{res.status}') + if oss_client.object_exists(f'objs/print/{pid}/{pid}.obj'): + res = oss_client.get_object_to_file(f'objs/print/{pid}/{pid}.obj', os.path.join(path, f'{pid}.obj')) + last_modified = oss_client.get_object_meta(f"objs/print/{pid}/{pid}.obj").last_modified + print(f'obj文件最后修改时间:{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(last_modified))}') + print(f'下载文件:objs/print/{pid}/{pid}.obj,状态:{res.status}') + else: + prefix = f'objs/print/{pid}/' + filelist = oss2.ObjectIteratorV2(oss_client, prefix=prefix) + for file in filelist: + filename = file.key.split('/')[-1] + if filename == '': continue + if filename.endswith(f'.obj'): + res = oss_client.get_object_to_file(file.key, os.path.join(path, f'{pid}.obj')) + last_modified = oss_client.get_object_meta(file.key).last_modified + print(f'obj文件最后修改时间:{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(last_modified))}') + print(f'下载文件:{file.key},状态:{res.status}') + break +#查找obj +def find_obj(pid, order_id): + find = False + if os.path.exists(os.path.join(sourceFilePath, f'{pid}_{order_id}', f'{pid}.obj')): + + if not os.path.exists(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.obj')): + #复制文件到 指定目录 + shutil.copytree(os.path.join(sourceFilePath, f'{pid}_{order_id}'), os.path.join(workdir,f'{pid}_{order_id}')) + + # shutil.copy(f"/data/datasets/print/{pid}_{order_id}/{pid}.obj", os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.obj')) + # shutil.copy(f"/data/datasets/print/{pid}_{order_id}/{pid}.mtl", os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.mtl')) + # shutil.copy(f"/data/datasets/print/{pid}_{order_id}/{pid}.Tex1.jpg", os.path.join(workdir, f'{pid}_{order_id}', f'{pid}Tex1.jpg')) + else: + print('没有找到obj模型文件,开始下载') + down_obj_fromoss(pid, order_id=order_id) + if os.path.exists(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.jpg')): + shutil.move(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.jpg'), os.path.join(workdir, f'{pid}_{order_id}', f'{pid}Tex1.jpg')) + with open(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.mtl'), 'r') as f: + lines = f.readlines() + lines = [line.replace(f'map_Kd {pid}.jpg', f'map_Kd {pid}Tex1.jpg') for line in lines] + with open(os.path.join(workdir, f'{pid}_{order_id}', f'{pid}.mtl'), 'w') as f: + f.writelines(lines) + filelist = os.listdir(os.path.join(workdir, f'{pid}_{order_id}')) 
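+    # Walk the working directory and return the filename of the {pid}.obj if it is present.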
+ for filename in filelist: + if f'{pid}.obj' in filename: + find = True + return filename + print('没有找到obj模型文件') + return '' + +def find_pid_objname(pid): + for obj in bpy.data.objects: + if obj.name.startswith(str(pid)): + return obj.name + +def ps_color_scale_adjustment(image, shadow=0, highlight=255, midtones=1): + ''' + 模拟 PS 的色阶调整; 0 <= Shadow < Highlight <= 255 + :param image: 传入的图片 + :param shadow: 黑场(0-Highlight) + :param highlight: 白场(Shadow-255) + :param midtones: 灰场(9.99-0.01) + :return: 图片 + ''' + if highlight > 255: + highlight = 255 + if shadow < 0: + shadow = 0 + if shadow >= highlight: + shadow = highlight - 2 + if midtones > 9.99: + midtones = 9.99 + if midtones < 0.01: + midtones = 0.01 + image = np.array(image, dtype=np.float16) + # 计算白场 黑场离差 + Diff = highlight - shadow + image = image - shadow + image[image < 0] = 0 + image = (image / Diff) ** (1 / midtones) * 255 + image[image > 255] = 255 + image = np.array(image, dtype=np.uint8) + + return image + + +def show_histogram(image, image_id, save_hist_dir, min_threshold, max_threshold): + ''' + 画出直方图展示 + :param image: 导入图片 + :param image_id: 图片id编号 + :param save_hist_dir: 保存路径 + :param min_threshold: 最小阈值 + :param max_threshold: 最大阈值 + :return: 原图image,和裁剪原图直方图高低阈值后的图片image_change + ''' + plt.rcParams['font.family'] = 'SimHei' + plt.rcParams['axes.unicode_minus'] = False + plt.hist(image.ravel(), 254, range=(2, 256), density=False) + plt.hist(image.ravel(), 96, range=(2, 50), density=False) # 放大 range(0, 50),bins值最好是range的两倍,显得更稀疏,便于对比 + plt.hist(image.ravel(), 110, range=(200, 255), density=False) # 放大 range(225, 255) + plt.annotate('thresh1=' + str(min_threshold), # 文本内容 + xy=(min_threshold, 0), # 箭头指向位置 # 阈值设定值! + xytext=(min_threshold, 500000), # 文本位置 # 阈值设定值! + arrowprops=dict(facecolor='black', width=1, shrink=5, headwidth=2)) # 箭头 + plt.annotate('thresh2=' + str(max_threshold), # 文本内容 + xy=(max_threshold, 0), # 箭头指向位置 # 阈值设定值! + xytext=(max_threshold, 500000), # 文本位置 # 阈值设定值! 
+ arrowprops=dict(facecolor='black', width=1, shrink=5, headwidth=2)) # 箭头 + # 在y轴上绘制一条直线 + # plt.axhline(y=10000, color='r', linestyle='--', linewidth=0.5) + plt.title(str(image_id)) + # plt.show() + # 保存直方图 + save_hist_name = os.path.join(save_hist_dir, f'{image_id}_{min_threshold}&{max_threshold}.jpg') + plt.savefig(save_hist_name) + # 清空画布, 防止重叠展示 + plt.clf() + + +def low_find_histogram_range(image, target_frequency): + ''' + 循环查找在 target_frequency (y)频次限制下的直方图区间值(x) + :param image: 导入图片 + :param target_frequency: 直方图 y 频次限制条件 + :return: 直方图区间 x,和 该区间频次 y + ''' + # 计算灰度直方图 + hist, bins = np.histogram(image, bins=256, range=[0, 256]) + # 初始化区间和频次 + interval = 2 + frequency = hist[255] + while frequency < target_frequency: + # 更新区间和频次 + interval += 1 + # 检查直方图的频次是否为None,如果频次是None,则将其设为0,这样可以避免将None和int进行比较报错。 + frequency = hist[interval] if hist[interval] is not None else 0 + frequency += hist[interval] if hist[interval] is not None else 0 + # 如果频次接近10000则停止循环 + if target_frequency - 2000 <= frequency <= target_frequency + 1000: + break + + return interval, frequency + + +def high_find_histogram_range(image, target_frequency): + ''' + 循环查找在 target_frequency (y)频次限制下的直方图区间值(x) + :param image: 导入图片 + :param target_frequency: 直方图 y 频次限制条件 + :return: 直方图区间 x,和 该区间频次 y + ''' + # 计算灰度直方图 + hist, bins = np.histogram(image, bins=256, range=[0, 256]) + # 初始化区间和频次 + interval = 255 + frequency = hist[255] + while frequency < target_frequency: + # 更新区间和频次 + interval -= 1 + # 检查直方图的频次是否为None,如果频次是None,则将其设为0,这样可以避免将None和int进行比较报错。 + frequency = hist[interval] if hist[interval] is not None else 0 + frequency += hist[interval] if hist[interval] is not None else 0 + # 如果频次接近10000则停止循环 + if target_frequency - 2000 <= frequency <= target_frequency + 2000: + break + + return interval, frequency + +def reduce_sharpness(image, factor): + ''' + 使用PIL库减弱图像锐度 + :param image: 图像 + :param factor: 锐度因子,0表示最大程度减弱锐度,1表示原始图像 + :return: 减弱锐度后的图像 + ''' + # OpenCV 格式的图像转换为 PIL 的 Image 对象 + image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + pil_image = Image.fromarray(image_rgb) + enhancer = ImageEnhance.Sharpness(pil_image) + reduced_image = enhancer.enhance(factor) + # PIL 的 Image 对象转换为 OpenCV 的图像格式 + image_array = np.array(reduced_image) + sharpened_image = cv2.cvtColor(image_array, cv2.COLOR_RGB2BGR) + + return sharpened_image + +def sharpening_filter(image): + ''' + 锐化滤波器对图片进行锐化,增强图像中的边缘和细节 + :param image: 导入图片 + :return: 锐化后的图片 + ''' + sharp_kernel = np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]]) + sharpened_image = cv2.filter2D(image, -1, sharp_kernel) + return sharpened_image + +def find_last_x(image, slope_threshold = 1000): + x = [] + y = [] + hist, bins = np.histogram(image, bins=256, range=[0, 256]) + + #找到50以内的最高峰 + max_y = 0 + max_i = 5 + for i in range(5, 50): + if hist[i] > max_y: + max_y = hist[i] + max_i = i + print(f'50以内最高峰值y:{max_y},最高峰位置x:{max_i}') + + for i in range(2, max_i): + x.append(i) + y.append(hist[i]) + slopes = [abs(y[i + 1] - y[i]) for i in range(len(x) - 1)] + + current_interval = [] + max_interval = [] + max_x = {} + for i, slope in enumerate(slopes): + current_interval.append(slope) + if slope >= slope_threshold: + if len(current_interval) > len(max_interval): + max_interval = current_interval.copy() + max_x[x[i]] = slope + current_interval = [] + + print(max_x) + last_x = list(max_x)[-1] + last_y = max_x[last_x] + return last_x, last_y + +def find_last_high(image, slope_threshold = 2500): + x = [] + y = [] + hist, bins = np.histogram(image, bins=255, range=[2, 255]) + + 
#找到200以上的最高峰 + max_y = 0 + max_i = 254 + for i in range(220, 255): + if hist[i] > max_y: + max_y = hist[i] + max_i = i + print(f'200以上的最高峰值y:{max_y},最高峰位置x:{max_i}') + + for i in range(max_i, 255): + x.append(i) + y.append(hist[i]) + slopes = [abs(y[i + 1] - y[i]) for i in range(len(x) - 1)] + + current_interval = [] + max_interval = [] + max_x = {} + find = False + for i in range(len(slopes) - 1, -1, -1): + slope = slopes[i] + current_interval.append(slope) + if slope >= slope_threshold: + find = True + if len(current_interval) > len(max_interval): + max_interval = current_interval.copy() + max_x[x[i]] = slope + current_interval = [] + #如果没有找到200以上很平,而且高度小于5000,就按220位置削平 + if not find and hist[220] < 5000: + max_x[220] = hist[220] + + print(max_x) + if len(max_x) > 0: + last_x = list(max_x)[0] + last_y = max_x[last_x] + else: + print(f'找不到200以上曲线较平的区间,使用254作为最高峰') + last_x = 254 + last_y = hist[254] + return last_x, last_y + +def remove_gray_and_sharpening(jpg_path): + input_image = cv2.imread(jpg_path) + # low_x_thresh, low_y_frequency = low_find_histogram_range(input_image, low_y_limit) + low_x_thresh, low_y_frequency = find_last_x(input_image) + # high_x_thresh, high_y_frequency = high_find_histogram_range(input_image, high_y_limit) + high_x_thresh, high_y_frequency = find_last_high(input_image) + print(f"{low_x_thresh} 区间, {low_y_frequency} 频次") + print(f"{high_x_thresh} 区间, {high_y_frequency} 频次") + high_output_image = ps_color_scale_adjustment(input_image, shadow=low_x_thresh, highlight=high_x_thresh, midtones=1) + # high_output_image = ps_color_scale_adjustment(low_ouput_image, shadow=0, highlight=high_x_thresh, midtones=1) + + # # 人体贴图和黑色背景交界处不进行锐化 + # gray = cv2.cvtColor(input_image, cv2.COLOR_BGR2GRAY) + # _, thresh = cv2.threshold(gray, 2, 255, cv2.THRESH_BINARY) + # kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (7, 7)) + # gradient = cv2.morphologyEx(thresh, cv2.MORPH_GRADIENT, kernel) + # roi_gradient = cv2.bitwise_and(high_output_image, high_output_image, mask=gradient) + + # # 锐化滤波器 + # # sharpened_image = sharpening_filter(high_output_image) + # sharpened_image = reduce_sharpness(high_output_image, factor=4) + # # 将原图边界替换锐化后的图片边界 + # sharpened_image[gradient != 0] = roi_gradient[gradient != 0] + + # 直方图标记并保存 + # show_histogram(input_image, img_id, low_x_thresh, high_x_thresh) + cv2.imwrite(jpg_path, high_output_image, [cv2.IMWRITE_JPEG_QUALITY, 95]) # 保存图片的质量是原图的 95% + +def main(workdir, r, print_id): + print('脚底板二维码程序开始运行...') + only_one = False + while True: + #随机休眠 1- 9 + time.sleep(random.uniform(1, 9)) + if print_id == '0': + try: + if r.llen('model:foot') == 0: + # print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), '队列为空,5秒后重试') + time.sleep(5) + continue + except Exception as e: + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), 'redis连接异常,5秒后重试') + print(e) + time.sleep(5) + r = redis.Redis(host='106.14.158.208', password='kcV2000', port=6379, db=6) + # r = redis.Redis(host='172.31.1.254', password='', port=6379, db=6) + # continue + # 打印队列里面的全部内容 + print(f'当前model:foot队列长度:{r.llen("model:foot")}') + for i in r.lrange('model:foot', 0, -1): + print(i) + print_id = r.lpop('model:foot') + if print_id is None: + print_id = '0' + continue + #判断是否存在相同的值 + isHaveAlready = 0 + for i in r.lrange('model:foot', 0, -1): + if i == print_id: + isHaveAlready = 1 + + if isHaveAlready == 1: + print_id = '0' + continue + + print_id = print_id.decode('utf-8') + else: + print(f'接收到运行一个{print_id}任务') + only_one = True + + res = 
requests.get(f'{get_pid_by_printid_url}?print_id={print_id}') + print('获取pid:', f'{get_pid_by_printid_url}?print_id={print_id}', res.text) + resCode = json.loads(res.text)['code'] + #该笔订单的获取信息有误,可能信息还没有成功 + if int(resCode) != 1000: + tempMesg = json.loads(res.text)['message'] + #判断是否包含 不存在 + if tempMesg == "打印订单不存在": + print(f"打印ID{print_id}打印订单不存在,跳过") + # res = requests.get(f'https://mp.api.suwa3d.com/api/footCode/deleteByPid?pid={pid}') + os.system(f'blender -b -P fill_dm_code.py') + return + + print("获取打印任务信息有问题,重新任务队列,打印id-",print_id,"重新执行脚底板任务,等待20s") + time.sleep(20) + #将pid 重新扔进队列 + r.lpush("model:foot", print_id) + #重新调用脚底板程序 + #os.system(f'blender -b -P fill_dm_code.py') + continue + use_foot_type = json.loads(res.text)['data']['use_foot_type'] + pid = json.loads(res.text)['data']['pid'] + order_id = json.loads(res.text)['data']['order_id'] + + filename = os.path.join(workdir, f'{pid}_{order_id}', find_obj(pid, order_id)) + print('导入obj文件:', filename) + # return + if only_one: + print(f'接收到运行一个{print_id}任务,强制调用cal_foot_position.py计算并上传qr_position') + os.system(f'blender -b -P cal_foot_position.py -- {pid}_{order_id}_{print_id}') + print("延时20s,等待计算脚底板坐标") + time.sleep(20) + res = requests.get(f'{get_qr_position_url}?print_id={print_id}') + print('从云端获取的qr_position1:', res.text) + codeTemp = json.loads(res.text)['code'] + if str(codeTemp) == "-1": + #移除该脚底板的面积处理 + res = requests.get(f'https://mp.api.suwa3d.com/api/footCode/deleteByPid?pid={pid}') + os.system(f'blender -b -P fill_dm_code.py') + return + qr_position = json.loads(res.text)['data']['position_data'] + else: + #从云端获取qr_position,如果获取为空,调用cal_foot_position.py计算并上传qr_position,再重新读取qr_position.txt + res = requests.get(f'{get_qr_position_url}?print_id={print_id}') + print('从云端获取的qr_position2:', res.text) + + codeTemp = json.loads(res.text)['code'] + if str(codeTemp) == "-1": + continue + + qr_position = json.loads(res.text)['data']['position_data'] + print("云端获取的坐标数据",qr_position) + if qr_position == '': + time.sleep(3) + print('qr_position为空,调用cal_foot_position.py计算并上传qr_position') + os.system(f'blender -b -P cal_foot_position.py -- {pid}_{order_id}_{print_id}') + print("延时20s,等待计算脚底板坐标") + time.sleep(20) + res = requests.get(f'{get_qr_position_url}?print_id={print_id}') + print('从云端获取的qr_position3:', res.text) + qr_position = json.loads(res.text)['data']['position_data'] + else: + qr_position = json.loads(qr_position) + + if qr_position == "": + print("获取脚底坐标数据为空,重新任务队列,打印id-",print_id,"重新执行脚底板任务") + #将pid 重新扔进队列 + r.lpush("model:foot", print_id) + #重新调用脚底板程序 + #os.system(f'blender -b -P fill_dm_code.py') + continue + + if type(qr_position) == str: qr_position = json.loads(qr_position) + print(f'type of qr_position:{type(qr_position)}') + print(f'qr_position:{qr_position}') + + qr_position['location'][2] = -0.1 + + temp_foot_data = print_id + # 根据print_id生成qr码 + qr_path = os.path.join(workdir, f'{pid}_{order_id}' ,'qr.png') + # if use_foot_type == "short_url": + # #调用接口获取短网址信息 + # short_url = get_short_url(print_id) + # if short_url == False: + # return + # temp_foot_data = short_url + # print(f'temp_foot_data---{temp_foot_data}') + gen_data_matrix(print_id,pid, qr_path) + + + # 导入obj文件,重置到标准单位 + bpy.ops.wm.read_homefile() + bpy.context.preferences.view.language = 'en_US' + bpy.ops.object.delete(use_global=False, confirm=False) + bpy.context.scene.unit_settings.scale_length = 0.001 + bpy.context.scene.unit_settings.length_unit = 'CENTIMETERS' + bpy.context.scene.unit_settings.mass_unit = 'GRAMS' + + 
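+        # Import the printable OBJ, normalise its scale and origin, then re-apply the QR position fetched from the cloud.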
bpy.ops.import_scene.obj(filepath=filename) + #bpy.ops.wm.obj_import(filepath=filename) + obj = bpy.context.selected_objects[0] + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + + pid_objname = find_pid_objname(pid) + + scale = 90 / obj.dimensions.y + obj.scale = (scale, scale, scale) + bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_1', align_axis={'Z'}) + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + obj.location[0] = 0 + obj.location[1] = 0 + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + + print(f'qr_position:{qr_position}') + print(f'qr_position_type:{type(qr_position)}') + + # 根据qr_position的值,恢复qr的位置和尺寸,重新生成贴图 + bpy.ops.object.load_reference_image(filepath=os.path.join(workdir, f'{pid}_{order_id}', 'qr.png')) + bpy.context.object.rotation_euler = (math.radians(-180), math.radians(0), qr_position['rotation']) + bpy.ops.transform.translate(value=qr_position['location'], orient_type='GLOBAL', orient_matrix=((1, 0, 0), (0, 1, 0), (0, 0, 1)), orient_matrix_type='GLOBAL', mirror=False, use_proportional_edit=False, proportional_edit_falloff='SMOOTH', proportional_size=1, use_proportional_connected=False, use_proportional_projected=False, snap=False, snap_elements={'INCREMENT'}, use_snap_project=False, snap_target='CLOSEST', use_snap_self=True, use_snap_edit=True, use_snap_nonedit=True, use_snap_selectable=False, release_confirm=True) + + bpy.context.object.empty_display_size = qr_position['dimensions'][0] + + qr_path = os.path.join(workdir,f'{pid}_{order_id}', f"{pid}_{order_id}Tex1_qr.png") + jpg_path = os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1.jpg") + #判断是否存在 jpg_path,不存在就执行下一个任务 + if not os.path.exists(jpg_path): + #移除该脚底板的面积处理 + res = requests.get(f'https://mp.api.suwa3d.com/api/footCode/deleteByPid?pid={pid}') + os.system(f'blender -b -P fill_dm_code.py') + return + + jpg_img = Image.open(jpg_path) + shutil.copyfile(jpg_path, os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1_noqr.jpg")) + + bpy.context.scene.eyek.res_x = jpg_img.width + bpy.context.scene.eyek.res_y = jpg_img.height + bpy.context.scene.eyek.path_export_image = qr_path + bpy.data.objects[pid_objname].select_set(True) + bpy.data.objects['Empty'].select_set(True) + bpy.context.view_layer.objects.active = bpy.data.objects[pid_objname] + bpy.ops.eyek.exe() + + qr_img = Image.open(qr_path) + jpg_img.paste(qr_img, (0, 0), qr_img) + jpg_img.save(jpg_path, quality=90) + shutil.copyfile(jpg_path, os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1_qr.jpg")) + + # 加入去灰、锐化 + remove_gray_and_sharpening(jpg_path) + + #上传脚底板文件 -》 修改为移动脚底板文件 + upload_jpg_mtl(pid, order_id, print_id) + + # plt.axis('equal') + # plt.show() + + # 保存blend文件 + # bpy.ops.wm.save_as_mainfile(filepath=f'{workdir}{pid}_{order_id}/{pid}_qr_end.blend') + bpy.ops.wm.quit_blender() + + # 删除临时文件 + print("workdirworkdir",workdir) + shutil.rmtree(os.path.join(workdir, f'{pid}_{order_id}')) + if only_one: + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} 运行{print_id}任务完成,退出程序') + break + else: + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} 运行{print_id}任务完成,继续运行下一个任务') + print_id = '0' + continue + + restart_current_process("blender -b -P fill_dm_code.py") + +#根据print_id 获取 短网址 +def get_short_url(print_id): + res = requests.get(f'{get_short_url_by_print_id}?print_id={print_id}') + resCode = json.loads(res.text)['code'] + #该笔订单的获取信息有误,可能信息还没有成功 + if int(resCode) != 1000: + print("获取短网址信息失败,延时1分钟,重新扔入脚底板队列,打印id-",print_id,"重新执行脚底板任务") 
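+        # Short-URL lookup failed: wait one minute, push the print_id back onto the queue, and signal failure to the caller.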
+ time.sleep(60) + #将pid 重新扔进队列 + r.lpush("model:foot", print_id) + #重新调用脚底板程序 + return False + return json.loads(res.text)['data'] + + +@retry(stop_max_attempt_number=10, wait_fixed=2000) +def upload_jpg_mtl(pid, order_id, print_id): + try: + print("移动脚底板文件") + #判断文件夹是否存在,不存在则创建 + if not os.path.exists(os.path.join(f'{resFilePath}/{pid}/foot_print_id_{print_id}/')): + os.makedirs(os.path.join(f'{resFilePath}/{pid}/foot_print_id_{print_id}/'),mode=0o777, exist_ok=True) + else: + #移除路径下的文件 + if os.path.exists(os.path.join(f'{resFilePath}/{pid}/foot_print_id_{print_id}/{pid}Tex1.jpg')): + os.remove(os.path.join(f'{resFilePath}/{pid}/foot_print_id_{print_id}/{pid}Tex1.jpg')) + print(f"文件 '{pid}' 已成功删除。") + + shutil.move(os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1.jpg"),os.path.join(f'{resFilePath}/{pid}/foot_print_id_{print_id}/')) + + + #print('生成贴图完成,开始上传...') + # oss_client.put_object_from_file(f'objs/print/{pid}/{pid}Tex1.{print_id}.jpg', os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1.jpg")) + # oss_client.put_object_from_file(f'objs/print/{pid}/{pid}.mtl', os.path.join(workdir,f'{pid}_{order_id}', f"{pid}.mtl")) + # # oss_client.put_object_from_file(f'objs/print/{pid}/{pid}Tex1_noqr.jpg', os.path.join(workdir,f'{pid}_{order_id}', f"{pid}Tex1_noqr.jpg")) + + print('更新状态为已生成脚底板二维码') + res = requests.post(f'{upload_qr_position_url}?print_id={print_id}') + # #记录日志 + # logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())}-结果:pid:{pid}-print_id:{print_id} {str(res.text)}") + print('更新返回状态:', f'{upload_qr_position_url}?print_id={print_id}', res.text) + + except Exception as e: + + print("迁移文件出 yichang ") + + +if __name__ == '__main__': + atexit.register(common.notify,"打印工厂-本地虚拟木脚底板处理程序已停止一个") + low_y_limit = 25000 + high_y_limit = 13000 + + get_qr_position_url = 'https://mp.api.suwa3d.com/api/printOrder/getFootCodePositionData' + upload_qr_position_url = 'https://mp.api.suwa3d.com/api/printOrder/updateFootCodeStatus' + get_pid_by_printid_url = 'https://mp.api.suwa3d.com/api/printOrder/getPidByPrintId' + get_short_url_by_print_id = 'https://mp.api.suwa3d.com/api/footCode/qrcode' + # get_qr_position_url = 'http://172.31.1.254:8199/api/printOrder/getFootCodePositionData' + # upload_qr_position_url = 'http://172.31.1.254:8199/api/printOrder/updateFootCodeStatus' + # get_pid_by_printid_url = 'http://172.31.1.254:8199/api/printOrder/getPidByPrintId' + + + r = redis.Redis(host='106.14.158.208', password='kcV2000', port=6379, db=6) + # r = redis.Redis(host='172.31.1.254', password='', port=6379, db=6) + AccessKeyId = 'LTAI5tSReWm8hz7dSYxxth8f' + AccessKeySecret = '8ywTDF9upPAtvgXtLKALY2iMYHIxdS' + Endpoint = 'oss-cn-shanghai.aliyuncs.com' + Bucket = 'suwa3d-securedata' + oss_client = oss2.Bucket(oss2.Auth(AccessKeyId, AccessKeySecret), Endpoint, Bucket) + + if platform.system() == 'Windows': + workdir = 'E:\\print\\foot\\' + else: + workdir = '/data/datasets/foot/' + + #构建好的obj文件目录 + sourceFilePath = '/data/datasets/print' + #脚底板最终保存的路径 + resFilePath = '/data/datasets/complate/objs' + + print("Usage: blender -b -P fill_dm_code.py") + + + if len(sys.argv) == 5: + print_ids = sys.argv[-1] + else: + print_ids = '0' + + print(f"print_ids--{print_ids}") + for print_id in print_ids.split(','): + main(workdir, r, print_id) + + diff --git a/blender/foot_update_res.log b/blender/foot_update_res.log new file mode 100644 index 0000000..e69de29 diff --git a/blender/lib/libIex.so b/blender/lib/libIex.so new file mode 120000 index 0000000..e615a73 --- /dev/null +++ 
b/blender/lib/libIex.so @@ -0,0 +1 @@ +libIex.so.30 \ No newline at end of file diff --git a/blender/lib/libIex.so.30 b/blender/lib/libIex.so.30 new file mode 120000 index 0000000..f44a558 --- /dev/null +++ b/blender/lib/libIex.so.30 @@ -0,0 +1 @@ +libIex.so.30.7.1 \ No newline at end of file diff --git a/blender/lib/libIex.so.30.7.1 b/blender/lib/libIex.so.30.7.1 new file mode 100644 index 0000000..a13e505 Binary files /dev/null and b/blender/lib/libIex.so.30.7.1 differ diff --git a/blender/lib/libIlmThread.so b/blender/lib/libIlmThread.so new file mode 120000 index 0000000..54fd007 --- /dev/null +++ b/blender/lib/libIlmThread.so @@ -0,0 +1 @@ +libIlmThread.so.30 \ No newline at end of file diff --git a/blender/lib/libIlmThread.so.30 b/blender/lib/libIlmThread.so.30 new file mode 120000 index 0000000..f41b380 --- /dev/null +++ b/blender/lib/libIlmThread.so.30 @@ -0,0 +1 @@ +libIlmThread.so.30.7.1 \ No newline at end of file diff --git a/blender/lib/libIlmThread.so.30.7.1 b/blender/lib/libIlmThread.so.30.7.1 new file mode 100644 index 0000000..ad09349 Binary files /dev/null and b/blender/lib/libIlmThread.so.30.7.1 differ diff --git a/blender/lib/libImath.so b/blender/lib/libImath.so new file mode 120000 index 0000000..a7df9ba --- /dev/null +++ b/blender/lib/libImath.so @@ -0,0 +1 @@ +libImath.so.30 \ No newline at end of file diff --git a/blender/lib/libImath.so.30 b/blender/lib/libImath.so.30 new file mode 120000 index 0000000..289d8cb --- /dev/null +++ b/blender/lib/libImath.so.30 @@ -0,0 +1 @@ +libImath.so.30.0.1 \ No newline at end of file diff --git a/blender/lib/libImath.so.30.0.1 b/blender/lib/libImath.so.30.0.1 new file mode 100644 index 0000000..fcf35d8 Binary files /dev/null and b/blender/lib/libImath.so.30.0.1 differ diff --git a/blender/lib/libMaterialXCore.so b/blender/lib/libMaterialXCore.so new file mode 120000 index 0000000..224422e --- /dev/null +++ b/blender/lib/libMaterialXCore.so @@ -0,0 +1 @@ +libMaterialXCore.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXCore.so.1 b/blender/lib/libMaterialXCore.so.1 new file mode 120000 index 0000000..77d1936 --- /dev/null +++ b/blender/lib/libMaterialXCore.so.1 @@ -0,0 +1 @@ +libMaterialXCore.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXCore.so.1.38.6 b/blender/lib/libMaterialXCore.so.1.38.6 new file mode 100644 index 0000000..f98a606 Binary files /dev/null and b/blender/lib/libMaterialXCore.so.1.38.6 differ diff --git a/blender/lib/libMaterialXFormat.so b/blender/lib/libMaterialXFormat.so new file mode 120000 index 0000000..5b56d92 --- /dev/null +++ b/blender/lib/libMaterialXFormat.so @@ -0,0 +1 @@ +libMaterialXFormat.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXFormat.so.1 b/blender/lib/libMaterialXFormat.so.1 new file mode 120000 index 0000000..ce909e4 --- /dev/null +++ b/blender/lib/libMaterialXFormat.so.1 @@ -0,0 +1 @@ +libMaterialXFormat.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXFormat.so.1.38.6 b/blender/lib/libMaterialXFormat.so.1.38.6 new file mode 100644 index 0000000..6aea26e Binary files /dev/null and b/blender/lib/libMaterialXFormat.so.1.38.6 differ diff --git a/blender/lib/libMaterialXGenGlsl.so b/blender/lib/libMaterialXGenGlsl.so new file mode 120000 index 0000000..c8e7d25 --- /dev/null +++ b/blender/lib/libMaterialXGenGlsl.so @@ -0,0 +1 @@ +libMaterialXGenGlsl.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenGlsl.so.1 b/blender/lib/libMaterialXGenGlsl.so.1 new file mode 120000 index 
0000000..ce56d46 --- /dev/null +++ b/blender/lib/libMaterialXGenGlsl.so.1 @@ -0,0 +1 @@ +libMaterialXGenGlsl.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenGlsl.so.1.38.6 b/blender/lib/libMaterialXGenGlsl.so.1.38.6 new file mode 100644 index 0000000..6b32f82 Binary files /dev/null and b/blender/lib/libMaterialXGenGlsl.so.1.38.6 differ diff --git a/blender/lib/libMaterialXGenMdl.so b/blender/lib/libMaterialXGenMdl.so new file mode 120000 index 0000000..20bc543 --- /dev/null +++ b/blender/lib/libMaterialXGenMdl.so @@ -0,0 +1 @@ +libMaterialXGenMdl.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenMdl.so.1 b/blender/lib/libMaterialXGenMdl.so.1 new file mode 120000 index 0000000..ee6a73f --- /dev/null +++ b/blender/lib/libMaterialXGenMdl.so.1 @@ -0,0 +1 @@ +libMaterialXGenMdl.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenMdl.so.1.38.6 b/blender/lib/libMaterialXGenMdl.so.1.38.6 new file mode 100644 index 0000000..90afa48 Binary files /dev/null and b/blender/lib/libMaterialXGenMdl.so.1.38.6 differ diff --git a/blender/lib/libMaterialXGenOsl.so b/blender/lib/libMaterialXGenOsl.so new file mode 120000 index 0000000..7d4766d --- /dev/null +++ b/blender/lib/libMaterialXGenOsl.so @@ -0,0 +1 @@ +libMaterialXGenOsl.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenOsl.so.1 b/blender/lib/libMaterialXGenOsl.so.1 new file mode 120000 index 0000000..077aef5 --- /dev/null +++ b/blender/lib/libMaterialXGenOsl.so.1 @@ -0,0 +1 @@ +libMaterialXGenOsl.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenOsl.so.1.38.6 b/blender/lib/libMaterialXGenOsl.so.1.38.6 new file mode 100644 index 0000000..40d7254 Binary files /dev/null and b/blender/lib/libMaterialXGenOsl.so.1.38.6 differ diff --git a/blender/lib/libMaterialXGenShader.so b/blender/lib/libMaterialXGenShader.so new file mode 120000 index 0000000..9c76d38 --- /dev/null +++ b/blender/lib/libMaterialXGenShader.so @@ -0,0 +1 @@ +libMaterialXGenShader.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenShader.so.1 b/blender/lib/libMaterialXGenShader.so.1 new file mode 120000 index 0000000..6841c2d --- /dev/null +++ b/blender/lib/libMaterialXGenShader.so.1 @@ -0,0 +1 @@ +libMaterialXGenShader.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXGenShader.so.1.38.6 b/blender/lib/libMaterialXGenShader.so.1.38.6 new file mode 100644 index 0000000..56cb5f5 Binary files /dev/null and b/blender/lib/libMaterialXGenShader.so.1.38.6 differ diff --git a/blender/lib/libMaterialXRender.so b/blender/lib/libMaterialXRender.so new file mode 120000 index 0000000..6836db0 --- /dev/null +++ b/blender/lib/libMaterialXRender.so @@ -0,0 +1 @@ +libMaterialXRender.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXRender.so.1 b/blender/lib/libMaterialXRender.so.1 new file mode 120000 index 0000000..888820e --- /dev/null +++ b/blender/lib/libMaterialXRender.so.1 @@ -0,0 +1 @@ +libMaterialXRender.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXRender.so.1.38.6 b/blender/lib/libMaterialXRender.so.1.38.6 new file mode 100644 index 0000000..f2b33ac Binary files /dev/null and b/blender/lib/libMaterialXRender.so.1.38.6 differ diff --git a/blender/lib/libMaterialXRenderGlsl.so b/blender/lib/libMaterialXRenderGlsl.so new file mode 120000 index 0000000..34c81d0 --- /dev/null +++ b/blender/lib/libMaterialXRenderGlsl.so @@ -0,0 +1 @@ +libMaterialXRenderGlsl.so.1 \ No newline at 
end of file diff --git a/blender/lib/libMaterialXRenderGlsl.so.1 b/blender/lib/libMaterialXRenderGlsl.so.1 new file mode 120000 index 0000000..212e6ff --- /dev/null +++ b/blender/lib/libMaterialXRenderGlsl.so.1 @@ -0,0 +1 @@ +libMaterialXRenderGlsl.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXRenderGlsl.so.1.38.6 b/blender/lib/libMaterialXRenderGlsl.so.1.38.6 new file mode 100644 index 0000000..82aa096 Binary files /dev/null and b/blender/lib/libMaterialXRenderGlsl.so.1.38.6 differ diff --git a/blender/lib/libMaterialXRenderHw.so b/blender/lib/libMaterialXRenderHw.so new file mode 120000 index 0000000..2dae0fd --- /dev/null +++ b/blender/lib/libMaterialXRenderHw.so @@ -0,0 +1 @@ +libMaterialXRenderHw.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXRenderHw.so.1 b/blender/lib/libMaterialXRenderHw.so.1 new file mode 120000 index 0000000..2e41069 --- /dev/null +++ b/blender/lib/libMaterialXRenderHw.so.1 @@ -0,0 +1 @@ +libMaterialXRenderHw.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXRenderHw.so.1.38.6 b/blender/lib/libMaterialXRenderHw.so.1.38.6 new file mode 100644 index 0000000..9f18bff Binary files /dev/null and b/blender/lib/libMaterialXRenderHw.so.1.38.6 differ diff --git a/blender/lib/libMaterialXRenderOsl.so b/blender/lib/libMaterialXRenderOsl.so new file mode 120000 index 0000000..53425b5 --- /dev/null +++ b/blender/lib/libMaterialXRenderOsl.so @@ -0,0 +1 @@ +libMaterialXRenderOsl.so.1 \ No newline at end of file diff --git a/blender/lib/libMaterialXRenderOsl.so.1 b/blender/lib/libMaterialXRenderOsl.so.1 new file mode 120000 index 0000000..5956695 --- /dev/null +++ b/blender/lib/libMaterialXRenderOsl.so.1 @@ -0,0 +1 @@ +libMaterialXRenderOsl.so.1.38.6 \ No newline at end of file diff --git a/blender/lib/libMaterialXRenderOsl.so.1.38.6 b/blender/lib/libMaterialXRenderOsl.so.1.38.6 new file mode 100644 index 0000000..72bea49 Binary files /dev/null and b/blender/lib/libMaterialXRenderOsl.so.1.38.6 differ diff --git a/blender/lib/libOpenColorIO.so b/blender/lib/libOpenColorIO.so new file mode 120000 index 0000000..12af79b --- /dev/null +++ b/blender/lib/libOpenColorIO.so @@ -0,0 +1 @@ +libOpenColorIO.so.2.2 \ No newline at end of file diff --git a/blender/lib/libOpenColorIO.so.2.2 b/blender/lib/libOpenColorIO.so.2.2 new file mode 120000 index 0000000..58abb78 --- /dev/null +++ b/blender/lib/libOpenColorIO.so.2.2 @@ -0,0 +1 @@ +libOpenColorIO.so.2.2.0 \ No newline at end of file diff --git a/blender/lib/libOpenColorIO.so.2.2.0 b/blender/lib/libOpenColorIO.so.2.2.0 new file mode 100644 index 0000000..039d062 Binary files /dev/null and b/blender/lib/libOpenColorIO.so.2.2.0 differ diff --git a/blender/lib/libOpenEXR.so b/blender/lib/libOpenEXR.so new file mode 120000 index 0000000..65030d9 --- /dev/null +++ b/blender/lib/libOpenEXR.so @@ -0,0 +1 @@ +libOpenEXR.so.30 \ No newline at end of file diff --git a/blender/lib/libOpenEXR.so.30 b/blender/lib/libOpenEXR.so.30 new file mode 120000 index 0000000..e29f763 --- /dev/null +++ b/blender/lib/libOpenEXR.so.30 @@ -0,0 +1 @@ +libOpenEXR.so.30.7.1 \ No newline at end of file diff --git a/blender/lib/libOpenEXR.so.30.7.1 b/blender/lib/libOpenEXR.so.30.7.1 new file mode 100644 index 0000000..182da70 Binary files /dev/null and b/blender/lib/libOpenEXR.so.30.7.1 differ diff --git a/blender/lib/libOpenEXRCore.so b/blender/lib/libOpenEXRCore.so new file mode 120000 index 0000000..bee7844 --- /dev/null +++ b/blender/lib/libOpenEXRCore.so @@ -0,0 +1 @@ +libOpenEXRCore.so.30 \ 
No newline at end of file diff --git a/blender/lib/libOpenEXRCore.so.30 b/blender/lib/libOpenEXRCore.so.30 new file mode 120000 index 0000000..93f71ab --- /dev/null +++ b/blender/lib/libOpenEXRCore.so.30 @@ -0,0 +1 @@ +libOpenEXRCore.so.30.7.1 \ No newline at end of file diff --git a/blender/lib/libOpenEXRCore.so.30.7.1 b/blender/lib/libOpenEXRCore.so.30.7.1 new file mode 100644 index 0000000..a9577be Binary files /dev/null and b/blender/lib/libOpenEXRCore.so.30.7.1 differ diff --git a/blender/lib/libOpenEXRUtil.so b/blender/lib/libOpenEXRUtil.so new file mode 120000 index 0000000..df2683d --- /dev/null +++ b/blender/lib/libOpenEXRUtil.so @@ -0,0 +1 @@ +libOpenEXRUtil.so.30 \ No newline at end of file diff --git a/blender/lib/libOpenEXRUtil.so.30 b/blender/lib/libOpenEXRUtil.so.30 new file mode 120000 index 0000000..f83249b --- /dev/null +++ b/blender/lib/libOpenEXRUtil.so.30 @@ -0,0 +1 @@ +libOpenEXRUtil.so.30.7.1 \ No newline at end of file diff --git a/blender/lib/libOpenEXRUtil.so.30.7.1 b/blender/lib/libOpenEXRUtil.so.30.7.1 new file mode 100644 index 0000000..c6dec93 Binary files /dev/null and b/blender/lib/libOpenEXRUtil.so.30.7.1 differ diff --git a/blender/lib/libOpenImageIO.so b/blender/lib/libOpenImageIO.so new file mode 120000 index 0000000..f647811 --- /dev/null +++ b/blender/lib/libOpenImageIO.so @@ -0,0 +1 @@ +libOpenImageIO.so.2.4 \ No newline at end of file diff --git a/blender/lib/libOpenImageIO.so.2.4 b/blender/lib/libOpenImageIO.so.2.4 new file mode 120000 index 0000000..f975417 --- /dev/null +++ b/blender/lib/libOpenImageIO.so.2.4 @@ -0,0 +1 @@ +libOpenImageIO.so.2.4.11 \ No newline at end of file diff --git a/blender/lib/libOpenImageIO.so.2.4.11 b/blender/lib/libOpenImageIO.so.2.4.11 new file mode 100644 index 0000000..e616e24 Binary files /dev/null and b/blender/lib/libOpenImageIO.so.2.4.11 differ diff --git a/blender/lib/libOpenImageIO_Util.so b/blender/lib/libOpenImageIO_Util.so new file mode 120000 index 0000000..f585ba0 --- /dev/null +++ b/blender/lib/libOpenImageIO_Util.so @@ -0,0 +1 @@ +libOpenImageIO_Util.so.2.4 \ No newline at end of file diff --git a/blender/lib/libOpenImageIO_Util.so.2.4 b/blender/lib/libOpenImageIO_Util.so.2.4 new file mode 120000 index 0000000..20a77aa --- /dev/null +++ b/blender/lib/libOpenImageIO_Util.so.2.4 @@ -0,0 +1 @@ +libOpenImageIO_Util.so.2.4.11 \ No newline at end of file diff --git a/blender/lib/libOpenImageIO_Util.so.2.4.11 b/blender/lib/libOpenImageIO_Util.so.2.4.11 new file mode 100644 index 0000000..bcd13d2 Binary files /dev/null and b/blender/lib/libOpenImageIO_Util.so.2.4.11 differ diff --git a/blender/lib/libboost_atomic.so b/blender/lib/libboost_atomic.so new file mode 120000 index 0000000..627ee80 --- /dev/null +++ b/blender/lib/libboost_atomic.so @@ -0,0 +1 @@ +libboost_atomic.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_atomic.so.1.80.0 b/blender/lib/libboost_atomic.so.1.80.0 new file mode 100644 index 0000000..c0a59f9 Binary files /dev/null and b/blender/lib/libboost_atomic.so.1.80.0 differ diff --git a/blender/lib/libboost_chrono.so b/blender/lib/libboost_chrono.so new file mode 120000 index 0000000..7430767 --- /dev/null +++ b/blender/lib/libboost_chrono.so @@ -0,0 +1 @@ +libboost_chrono.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_chrono.so.1.80.0 b/blender/lib/libboost_chrono.so.1.80.0 new file mode 100644 index 0000000..e43c92a Binary files /dev/null and b/blender/lib/libboost_chrono.so.1.80.0 differ diff --git a/blender/lib/libboost_date_time.so 
b/blender/lib/libboost_date_time.so new file mode 120000 index 0000000..b33293f --- /dev/null +++ b/blender/lib/libboost_date_time.so @@ -0,0 +1 @@ +libboost_date_time.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_date_time.so.1.80.0 b/blender/lib/libboost_date_time.so.1.80.0 new file mode 100644 index 0000000..128ac8c Binary files /dev/null and b/blender/lib/libboost_date_time.so.1.80.0 differ diff --git a/blender/lib/libboost_filesystem.so b/blender/lib/libboost_filesystem.so new file mode 120000 index 0000000..3bc1fe8 --- /dev/null +++ b/blender/lib/libboost_filesystem.so @@ -0,0 +1 @@ +libboost_filesystem.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_filesystem.so.1.80.0 b/blender/lib/libboost_filesystem.so.1.80.0 new file mode 100644 index 0000000..cefca2f Binary files /dev/null and b/blender/lib/libboost_filesystem.so.1.80.0 differ diff --git a/blender/lib/libboost_iostreams.so b/blender/lib/libboost_iostreams.so new file mode 120000 index 0000000..414f647 --- /dev/null +++ b/blender/lib/libboost_iostreams.so @@ -0,0 +1 @@ +libboost_iostreams.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_iostreams.so.1.80.0 b/blender/lib/libboost_iostreams.so.1.80.0 new file mode 100644 index 0000000..6fa51b7 Binary files /dev/null and b/blender/lib/libboost_iostreams.so.1.80.0 differ diff --git a/blender/lib/libboost_locale.so b/blender/lib/libboost_locale.so new file mode 120000 index 0000000..5f9d704 --- /dev/null +++ b/blender/lib/libboost_locale.so @@ -0,0 +1 @@ +libboost_locale.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_locale.so.1.80.0 b/blender/lib/libboost_locale.so.1.80.0 new file mode 100644 index 0000000..c555b15 Binary files /dev/null and b/blender/lib/libboost_locale.so.1.80.0 differ diff --git a/blender/lib/libboost_numpy310.so b/blender/lib/libboost_numpy310.so new file mode 120000 index 0000000..976c03b --- /dev/null +++ b/blender/lib/libboost_numpy310.so @@ -0,0 +1 @@ +libboost_numpy310.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_numpy310.so.1.80.0 b/blender/lib/libboost_numpy310.so.1.80.0 new file mode 100644 index 0000000..991265c Binary files /dev/null and b/blender/lib/libboost_numpy310.so.1.80.0 differ diff --git a/blender/lib/libboost_program_options.so b/blender/lib/libboost_program_options.so new file mode 120000 index 0000000..09830cf --- /dev/null +++ b/blender/lib/libboost_program_options.so @@ -0,0 +1 @@ +libboost_program_options.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_program_options.so.1.80.0 b/blender/lib/libboost_program_options.so.1.80.0 new file mode 100644 index 0000000..daa5531 Binary files /dev/null and b/blender/lib/libboost_program_options.so.1.80.0 differ diff --git a/blender/lib/libboost_python310.so b/blender/lib/libboost_python310.so new file mode 120000 index 0000000..189adf3 --- /dev/null +++ b/blender/lib/libboost_python310.so @@ -0,0 +1 @@ +libboost_python310.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_python310.so.1.80.0 b/blender/lib/libboost_python310.so.1.80.0 new file mode 100644 index 0000000..a8e6607 Binary files /dev/null and b/blender/lib/libboost_python310.so.1.80.0 differ diff --git a/blender/lib/libboost_regex.so b/blender/lib/libboost_regex.so new file mode 120000 index 0000000..80a3157 --- /dev/null +++ b/blender/lib/libboost_regex.so @@ -0,0 +1 @@ +libboost_regex.so.1.80.0 \ No newline at end of file diff --git 
a/blender/lib/libboost_regex.so.1.80.0 b/blender/lib/libboost_regex.so.1.80.0 new file mode 100644 index 0000000..1f294bc Binary files /dev/null and b/blender/lib/libboost_regex.so.1.80.0 differ diff --git a/blender/lib/libboost_serialization.so b/blender/lib/libboost_serialization.so new file mode 120000 index 0000000..a498078 --- /dev/null +++ b/blender/lib/libboost_serialization.so @@ -0,0 +1 @@ +libboost_serialization.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_serialization.so.1.80.0 b/blender/lib/libboost_serialization.so.1.80.0 new file mode 100644 index 0000000..8e1e4e5 Binary files /dev/null and b/blender/lib/libboost_serialization.so.1.80.0 differ diff --git a/blender/lib/libboost_system.so b/blender/lib/libboost_system.so new file mode 120000 index 0000000..f78bd9e --- /dev/null +++ b/blender/lib/libboost_system.so @@ -0,0 +1 @@ +libboost_system.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_system.so.1.80.0 b/blender/lib/libboost_system.so.1.80.0 new file mode 100644 index 0000000..1d967ea Binary files /dev/null and b/blender/lib/libboost_system.so.1.80.0 differ diff --git a/blender/lib/libboost_thread.so b/blender/lib/libboost_thread.so new file mode 120000 index 0000000..202a22c --- /dev/null +++ b/blender/lib/libboost_thread.so @@ -0,0 +1 @@ +libboost_thread.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_thread.so.1.80.0 b/blender/lib/libboost_thread.so.1.80.0 new file mode 100644 index 0000000..3fb302a Binary files /dev/null and b/blender/lib/libboost_thread.so.1.80.0 differ diff --git a/blender/lib/libboost_wave.so b/blender/lib/libboost_wave.so new file mode 120000 index 0000000..fccd6a6 --- /dev/null +++ b/blender/lib/libboost_wave.so @@ -0,0 +1 @@ +libboost_wave.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_wave.so.1.80.0 b/blender/lib/libboost_wave.so.1.80.0 new file mode 100644 index 0000000..164093d Binary files /dev/null and b/blender/lib/libboost_wave.so.1.80.0 differ diff --git a/blender/lib/libboost_wserialization.so b/blender/lib/libboost_wserialization.so new file mode 120000 index 0000000..d448393 --- /dev/null +++ b/blender/lib/libboost_wserialization.so @@ -0,0 +1 @@ +libboost_wserialization.so.1.80.0 \ No newline at end of file diff --git a/blender/lib/libboost_wserialization.so.1.80.0 b/blender/lib/libboost_wserialization.so.1.80.0 new file mode 100644 index 0000000..cf6cd18 Binary files /dev/null and b/blender/lib/libboost_wserialization.so.1.80.0 differ diff --git a/blender/lib/libcycles_kernel_oneapi_aot.so b/blender/lib/libcycles_kernel_oneapi_aot.so new file mode 100644 index 0000000..6fb2ab8 Binary files /dev/null and b/blender/lib/libcycles_kernel_oneapi_aot.so differ diff --git a/blender/lib/libembree4.so b/blender/lib/libembree4.so new file mode 120000 index 0000000..a7661fe --- /dev/null +++ b/blender/lib/libembree4.so @@ -0,0 +1 @@ +libembree4.so.4 \ No newline at end of file diff --git a/blender/lib/libembree4.so.4 b/blender/lib/libembree4.so.4 new file mode 100644 index 0000000..a288db7 Binary files /dev/null and b/blender/lib/libembree4.so.4 differ diff --git a/blender/lib/libopenvdb.so b/blender/lib/libopenvdb.so new file mode 120000 index 0000000..1615ade --- /dev/null +++ b/blender/lib/libopenvdb.so @@ -0,0 +1 @@ +libopenvdb.so.10.0 \ No newline at end of file diff --git a/blender/lib/libopenvdb.so.10.0 b/blender/lib/libopenvdb.so.10.0 new file mode 120000 index 0000000..32eee90 --- /dev/null +++ b/blender/lib/libopenvdb.so.10.0 @@ -0,0 
+1 @@ +libopenvdb.so.10.0.0 \ No newline at end of file diff --git a/blender/lib/libopenvdb.so.10.0.0 b/blender/lib/libopenvdb.so.10.0.0 new file mode 100644 index 0000000..e3b8ded Binary files /dev/null and b/blender/lib/libopenvdb.so.10.0.0 differ diff --git a/blender/lib/libosdCPU.so b/blender/lib/libosdCPU.so new file mode 120000 index 0000000..c509c34 --- /dev/null +++ b/blender/lib/libosdCPU.so @@ -0,0 +1 @@ +libosdCPU.so.3.5.0 \ No newline at end of file diff --git a/blender/lib/libosdCPU.so.3.5.0 b/blender/lib/libosdCPU.so.3.5.0 new file mode 100644 index 0000000..0ee992b Binary files /dev/null and b/blender/lib/libosdCPU.so.3.5.0 differ diff --git a/blender/lib/libosdGPU.so b/blender/lib/libosdGPU.so new file mode 120000 index 0000000..4d0e04b --- /dev/null +++ b/blender/lib/libosdGPU.so @@ -0,0 +1 @@ +libosdGPU.so.3.5.0 \ No newline at end of file diff --git a/blender/lib/libosdGPU.so.3.5.0 b/blender/lib/libosdGPU.so.3.5.0 new file mode 100644 index 0000000..75b64d0 Binary files /dev/null and b/blender/lib/libosdGPU.so.3.5.0 differ diff --git a/blender/lib/libpi_level_zero.so b/blender/lib/libpi_level_zero.so new file mode 100644 index 0000000..6e879c4 Binary files /dev/null and b/blender/lib/libpi_level_zero.so differ diff --git a/blender/lib/libsycl.so b/blender/lib/libsycl.so new file mode 120000 index 0000000..141effe --- /dev/null +++ b/blender/lib/libsycl.so @@ -0,0 +1 @@ +libsycl.so.6 \ No newline at end of file diff --git a/blender/lib/libsycl.so.6 b/blender/lib/libsycl.so.6 new file mode 120000 index 0000000..82505ab --- /dev/null +++ b/blender/lib/libsycl.so.6 @@ -0,0 +1 @@ +libsycl.so.6.1.0-0 \ No newline at end of file diff --git a/blender/lib/libsycl.so.6.1.0-0 b/blender/lib/libsycl.so.6.1.0-0 new file mode 100644 index 0000000..5ab899f Binary files /dev/null and b/blender/lib/libsycl.so.6.1.0-0 differ diff --git a/blender/lib/libtbb.so b/blender/lib/libtbb.so new file mode 120000 index 0000000..8c6f6be --- /dev/null +++ b/blender/lib/libtbb.so @@ -0,0 +1 @@ +libtbb.so.2 \ No newline at end of file diff --git a/blender/lib/libtbb.so.2 b/blender/lib/libtbb.so.2 new file mode 100644 index 0000000..2838f6d Binary files /dev/null and b/blender/lib/libtbb.so.2 differ diff --git a/blender/lib/libusd_ms.so b/blender/lib/libusd_ms.so new file mode 100644 index 0000000..10cf08e Binary files /dev/null and b/blender/lib/libusd_ms.so differ diff --git a/blender/lib/mesa/libGL.so b/blender/lib/mesa/libGL.so new file mode 120000 index 0000000..3cdd9c0 --- /dev/null +++ b/blender/lib/mesa/libGL.so @@ -0,0 +1 @@ +libGL.so.1 \ No newline at end of file diff --git a/blender/lib/mesa/libGL.so.1 b/blender/lib/mesa/libGL.so.1 new file mode 120000 index 0000000..9020ff5 --- /dev/null +++ b/blender/lib/mesa/libGL.so.1 @@ -0,0 +1 @@ +libGL.so.1.5.0 \ No newline at end of file diff --git a/blender/lib/mesa/libGL.so.1.5.0 b/blender/lib/mesa/libGL.so.1.5.0 new file mode 100644 index 0000000..1ea3d91 Binary files /dev/null and b/blender/lib/mesa/libGL.so.1.5.0 differ diff --git a/blender/lib/mesa/libGLU.so b/blender/lib/mesa/libGLU.so new file mode 120000 index 0000000..3665143 --- /dev/null +++ b/blender/lib/mesa/libGLU.so @@ -0,0 +1 @@ +libGLU.so.1.3.1 \ No newline at end of file diff --git a/blender/lib/mesa/libGLU.so.1 b/blender/lib/mesa/libGLU.so.1 new file mode 120000 index 0000000..3665143 --- /dev/null +++ b/blender/lib/mesa/libGLU.so.1 @@ -0,0 +1 @@ +libGLU.so.1.3.1 \ No newline at end of file diff --git a/blender/lib/mesa/libGLU.so.1.3.1 b/blender/lib/mesa/libGLU.so.1.3.1 new file 
mode 100644 index 0000000..c415fcb Binary files /dev/null and b/blender/lib/mesa/libGLU.so.1.3.1 differ diff --git a/blender/lib/mesa/libglapi.so b/blender/lib/mesa/libglapi.so new file mode 120000 index 0000000..61a3e5f --- /dev/null +++ b/blender/lib/mesa/libglapi.so @@ -0,0 +1 @@ +libglapi.so.0 \ No newline at end of file diff --git a/blender/lib/mesa/libglapi.so.0 b/blender/lib/mesa/libglapi.so.0 new file mode 120000 index 0000000..9de6608 --- /dev/null +++ b/blender/lib/mesa/libglapi.so.0 @@ -0,0 +1 @@ +libglapi.so.0.0.0 \ No newline at end of file diff --git a/blender/lib/mesa/libglapi.so.0.0.0 b/blender/lib/mesa/libglapi.so.0.0.0 new file mode 100644 index 0000000..b39d85b Binary files /dev/null and b/blender/lib/mesa/libglapi.so.0.0.0 differ diff --git a/blender/lib/usd/ar/resources/plugInfo.json b/blender/lib/usd/ar/resources/plugInfo.json new file mode 100644 index 0000000..30fe145 --- /dev/null +++ b/blender/lib/usd/ar/resources/plugInfo.json @@ -0,0 +1,23 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "ArResolver": {}, + "ArDefaultResolver": { + "bases": [ + "ArResolver" + ], + "implementsContexts": true + }, + "ArPackageResolver": {} + } + }, + "LibraryPath": "", + "Name": "ar", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/glf/resources/plugInfo.json b/blender/lib/usd/glf/resources/plugInfo.json new file mode 100644 index 0000000..f9e2a7f --- /dev/null +++ b/blender/lib/usd/glf/resources/plugInfo.json @@ -0,0 +1,14 @@ +{ + "Plugins": [ + { + "Info": { + "ShaderResources": "shaders" + }, + "LibraryPath": "", + "Name": "glf", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/glf/resources/shaders/pcfShader.glslfx b/blender/lib/usd/glf/resources/shaders/pcfShader.glslfx new file mode 100644 index 0000000..c03cd8e --- /dev/null +++ b/blender/lib/usd/glf/resources/shaders/pcfShader.glslfx @@ -0,0 +1,141 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// + +---Percentage-Closer Filtering (PCF) + +-- glsl PCF.ShadowFilterVertex +// --------------------------------------------------------------------------- +// PCF compute shadow filter length in vertex shader +// --------------------------------------------------------------------------- +#define REQUIRE_SHADOW_FILTER_WIDTH + +-- glsl PCF.ShadowFilterFragment +// --------------------------------------------------------------------------- +// PCF shadow filtering in fragment shader +// --------------------------------------------------------------------------- + +#if NUM_SHADOWS > 0 + +#define SHADOW_FILTER + +#define JITTER_NUM_SAMPLES 16 +#define PCF_NUM_SAMPLES JITTER_NUM_SAMPLES + +FORWARD_DECL(float shadowSample(int index, vec4 coord)); +FORWARD_DECL(float shadowCompare(int index, vec4 coord)); + +vec2 jitter[JITTER_NUM_SAMPLES] = { + vec2(-0.94201624, -0.39906216), + vec2( 0.94558609, -0.76890725), + vec2(-0.09418410, -0.92938870), + vec2( 0.34495938, 0.29387760), + vec2(-0.91588581, 0.45771432), + vec2(-0.81544232, -0.87912464), + vec2(-0.38277543, 0.27676845), + vec2( 0.97484398, 0.75648379), + vec2( 0.44323325, -0.97511554), + vec2( 0.53742981, -0.47373420), + vec2(-0.26496911, -0.41893023), + vec2( 0.79197514, 0.19090188), + vec2(-0.24188840, 0.99706507), + vec2(-0.81409955, 0.91437590), + vec2( 0.19984126, 0.78641367), + vec2( 0.14383161, -0.14100790) +}; + +in vec2 FshadowFilterWidth[NUM_SHADOWS]; + +float +shadowFilter(int index, vec4 Pshadow, vec4 Peye) +{ + vec2 filterWidth = FshadowFilterWidth[index]; + + float sum = 0.0; + for (int i=0; i 0 + +-- glsl PCF.ShadowFilterFragmentOnly +// --------------------------------------------------------------------------- +// PCF shadow filtering with compute filter length on the fly. +// no vertex shader needed. +// --------------------------------------------------------------------------- + +#if NUM_SHADOWS > 0 + +#define SHADOW_FILTER +#define JITTER_NUM_SAMPLES 16 +#define PCF_NUM_SAMPLES JITTER_NUM_SAMPLES + +FORWARD_DECL(float shadowSample(int index, vec4 coord)); +FORWARD_DECL(float shadowCompare(int index, vec4 coord)); + +vec2 jitter[JITTER_NUM_SAMPLES] = { + vec2(-0.94201624, -0.39906216), + vec2( 0.94558609, -0.76890725), + vec2(-0.09418410, -0.92938870), + vec2( 0.34495938, 0.29387760), + vec2(-0.91588581, 0.45771432), + vec2(-0.81544232, -0.87912464), + vec2(-0.38277543, 0.27676845), + vec2( 0.97484398, 0.75648379), + vec2( 0.44323325, -0.97511554), + vec2( 0.53742981, -0.47373420), + vec2(-0.26496911, -0.41893023), + vec2( 0.79197514, 0.19090188), + vec2(-0.24188840, 0.99706507), + vec2(-0.81409955, 0.91437590), + vec2( 0.19984126, 0.78641367), + vec2( 0.14383161, -0.14100790) +}; + +// defined in simpleLighting.glslfx +FORWARD_DECL(vec2 computeShadowFilterWidth(int index, vec4 Peye)); + +float +shadowFilter(int index, vec4 Pshadow, vec4 Peye) +{ + vec2 filterWidth = computeShadowFilterWidth(index, Peye); + + float sum = 0.0; + for (int i=0; i 0 diff --git a/blender/lib/usd/glf/resources/shaders/simpleLighting.glslfx b/blender/lib/usd/glf/resources/shaders/simpleLighting.glslfx new file mode 100644 index 0000000..6dbfa6d --- /dev/null +++ b/blender/lib/usd/glf/resources/shaders/simpleLighting.glslfx @@ -0,0 +1,558 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. 
is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/glf/shaders/simpleLighting.glslfx + +-- glsl SimpleLighting.GeometryInjection +#ifndef NUM_SHADOWS +#define NUM_SHADOWS 0 +#endif + +#if NUM_SHADOWS == 0 + +#else // NUM_SHADOWS == 0 + +struct ShadowMatrix { + mat4 eyeToShadowMatrix; + mat4 shadowToEyeMatrix; + float blur; + float bias; + float padding0; + float padding1; +}; + +FORWARD_DECL(mat4 GetWorldToViewMatrix()); +FORWARD_DECL(mat4 GetWorldToViewInverseMatrix()); + +ShadowMatrix GetShadow(int shadowIndex) { + ShadowMatrix shadowMat; + shadowMat.eyeToShadowMatrix = HdGet_shadow_worldToShadowMatrix(shadowIndex) + * GetWorldToViewInverseMatrix(); + shadowMat.shadowToEyeMatrix = GetWorldToViewMatrix() * + HdGet_shadow_shadowToWorldMatrix(shadowIndex); + shadowMat.blur = HdGet_shadow_blur(shadowIndex); + shadowMat.bias = HdGet_shadow_bias(shadowIndex); + return shadowMat; +} + +out vec2 FshadowFilterWidth[NUM_SHADOWS]; + +// Transforms a unit tangent vector parallel to a coordinate axis in +// shadow space to eye space and returns the length of the result +// (without doing the perspective division). +// +// The first argument is assumed to be a column of the shadow to eye +// matrix (using first column corresponds to unit tangent vector +// pointing in x-direction, ...). The second argument is supposed to +// be the (homogeneous) coordinates of the point in eye space. +// +float +shadowTangentLength(vec4 dir, vec4 Peye) +{ + // Computation similar to computeRayDirectionEye in volume.glslfx. + const vec3 shadowTangent = dir.xyz * Peye.w - dir.w * Peye.xyz; + return length(shadowTangent); +} + +// Computes the lengths that tangent vectors parallel to the x-, +// respectively, y-direction in shadow space at the current location +// need to have such that their image in eye space have length blur +// (without perspective division). 
+// +void +computeShadowFilterWidth(vec4 Peye) +{ + // interpolate filter width + for (int i = 0; i < NUM_SHADOWS; ++i) { + ShadowMatrix shadowMat = GetShadow(i); + FShadowFilterWidth[i] = + vec2(shadowMat.blur) / + vec2(shadowTangentLength(shadowMat.shadowToEyeMatrix[0], Peye), + shadowTangentLength(shadowMat.shadowToEyeMatrix[1], Peye)); + } +} + +#define COMPUTE_SHADOW_FILTER_WIDTH 1 + +#endif // NUM_SHADOWS == 0 + +-- glsl SimpleLighting.LightIntegrator + +// --------------------------------------------------------------------------- +// struct definitions +// --------------------------------------------------------------------------- +#ifndef NUM_LIGHTS +#define NUM_LIGHTS 0 +#endif +#ifndef NUM_SHADOWS +#define NUM_SHADOWS 0 +#endif + +#ifndef HD_HAS_integrateLights +#define HD_HAS_integrateLights +#endif + +struct LightSource { + vec4 position; + vec4 ambient; + vec4 diffuse; + vec4 specular; + vec4 spotDirection; + vec4 spotCutoffAndFalloff; + vec4 attenuation; + mat4 worldToLightTransform; + int shadowIndexStart; + int shadowIndexEnd; + bool hasShadow; + bool isIndirectLight; +}; + +struct ShadowMatrix { + mat4 eyeToShadowMatrix; + mat4 shadowToEyeMatrix; + float blur; + float bias; + float padding0; + float padding1; +}; + +struct LightingContribution { + vec3 ambient; + vec3 diffuse; + vec3 specular; +}; + +struct LightingInterfaceProperties { + float shininess; + float roughness; + float metallic; + vec3 matSpecular; +}; + +// --------------------------------------------------------------------------- +// data accessors +// --------------------------------------------------------------------------- +#if NUM_LIGHTS == 0 + +#else // NUM_LIGHTS == 0 + +bool GetUseLighting() { + return HdGet_useLighting(); +} + +bool GetUseColorMaterialDiffuse() { + return HdGet_useColorMaterialDiffuse(); +} + +FORWARD_DECL(mat4 GetWorldToViewMatrix()); +FORWARD_DECL(mat4 GetWorldToViewInverseMatrix()); + +LightSource GetLightSource(int lightIndex) { + LightSource light; + light.position = GetWorldToViewMatrix() * + HdGet_lightSource_position(lightIndex); + light.ambient = HdGet_lightSource_ambient(lightIndex); + light.diffuse = HdGet_lightSource_diffuse(lightIndex); + light.specular = HdGet_lightSource_specular(lightIndex); + light.spotDirection = GetWorldToViewMatrix() * + vec4(HdGet_lightSource_spotDirection(lightIndex), 0); + light.spotCutoffAndFalloff = vec4(HdGet_lightSource_spotCutoff(lightIndex), + HdGet_lightSource_spotFalloff(lightIndex), 0, 0); + light.attenuation = vec4(HdGet_lightSource_attenuation(lightIndex), 0); + light.worldToLightTransform = + HdGet_lightSource_worldToLightTransform(lightIndex); + light.shadowIndexStart = HdGet_lightSource_shadowIndexStart(lightIndex); + light.shadowIndexEnd = HdGet_lightSource_shadowIndexEnd(lightIndex); + light.hasShadow = HdGet_lightSource_hasShadow(lightIndex); + light.isIndirectLight = HdGet_lightSource_isIndirectLight(lightIndex); + return light; +} +#endif // NUM_LIGHTS == 0 + +#if NUM_SHADOWS == 0 + +#else // NUM_SHADOWS == 0 + +ShadowMatrix GetShadow(int shadowIndex) { + ShadowMatrix shadowMat; + shadowMat.eyeToShadowMatrix = HdGet_shadow_worldToShadowMatrix(shadowIndex) + * GetWorldToViewInverseMatrix(); + shadowMat.shadowToEyeMatrix = GetWorldToViewMatrix() * + HdGet_shadow_shadowToWorldMatrix(shadowIndex); + shadowMat.blur = HdGet_shadow_blur(shadowIndex); + shadowMat.bias = HdGet_shadow_bias(shadowIndex); + return shadowMat; +} + +float +shadowTangentLength(vec4 dir, vec4 Peye) +{ + const vec3 shadowTangent = dir.xyz * Peye.w - dir.w 
* Peye.xyz; + return length(shadowTangent); +} + +vec2 +computeShadowFilterWidth(int index, vec4 Peye) +{ + ShadowMatrix shadowMat = GetShadow(index); + return + vec2(shadowMat.blur) / + vec2(shadowTangentLength(shadowMat.shadowToEyeMatrix[0], Peye), + shadowTangentLength(shadowMat.shadowToEyeMatrix[1], Peye)); +} + +#endif // NUM_SHADOWS == 0 + +// --------------------------------------------------------------------------- +// lighting functions +// --------------------------------------------------------------------------- + +#if NUM_LIGHTS == 0 + +LightingContribution +integrateLightsConstant(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + LightingContribution result; + result.ambient = vec3(0.0); + //pefectly diffuse white hemisphere contribution + result.diffuse = vec3(1.0); + result.specular = vec3(0.0); + + return result; +} + +LightingContribution +integrateLightsDefault(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + // If no lights, lighting contribution is zero. + LightingContribution result; + result.ambient = vec3(0.0); + result.diffuse = vec3(0.0); + result.specular = vec3(0.0); + + return result; +} + +#else // NUM_LIGHTS == 0 + +#if NUM_SHADOWS > 0 +float +shadowCompare(int shadowIndex, vec4 coord) +{ + coord /= coord.w; + coord.z = min(1.0, coord.z + GetShadow(shadowIndex).bias); + + // Cascade selection (in shadowing()) ensures Pshadow.xy is in [0,1], + // but shadowFilter() can throw that off so we need to clamp. + // XXX: sample the next cascade over? + coord.xy = clamp(coord.xy, vec2(0), vec2(1)); + + return HdGet_shadowCompareTextures(shadowIndex, coord.xyz).x; +} + +#ifndef SHADOW_FILTER +float +shadowFilter(int shadowIndex, vec4 coord, vec4 Peye) +{ + return shadowCompare(shadowIndex, coord); +} +#endif // SHADOW_FILTER +#endif // NUM_SHADOWS > 0 + +float +shadowing(int lightIndex, vec4 Peye) +{ +#if NUM_SHADOWS > 0 + for (int shadowIndex = GetLightSource(lightIndex).shadowIndexStart; + shadowIndex <= GetLightSource(lightIndex).shadowIndexEnd; + ++shadowIndex) { + + vec4 coord = GetShadow(shadowIndex).eyeToShadowMatrix * Peye; + + // If the point-to-be-lit isn't covered by this cascade, skip to + // the next one... + if (any(lessThan(coord.xyz, vec3(0))) || + any(greaterThan(coord.xyz, coord.www))) { + continue; + } + + return shadowFilter(shadowIndex, coord, Peye); + } +#endif // NUM_SHADOWS > 0 + // ... either no shadows, or no coverage. + return 1.0; +} + +float +lightDistanceAttenuation(vec4 Peye, int index) +{ + float distAtten = 1.0; + LightSource light = GetLightSource(index); + if (light.position.w != 0.0) { + const float d = distance(light.position, Peye); + const float atten = light.attenuation[0] + + light.attenuation[1] * d + + light.attenuation[2] * d*d; + if (atten == 0) { + distAtten = 0.0; + } else { + distAtten = 1.0 / atten; + } + } + return distAtten; +} + +float +lightSpotAttenuation(vec3 l, int index) +{ + float spotAtten = 1.0; + LightSource light = GetLightSource(index); + if (light.spotCutoffAndFalloff.x < 180.0) { + float cosLight = dot(-l, light.spotDirection.xyz); + spotAtten = (cosLight < cos(radians(light.spotCutoffAndFalloff.x))) + ? 
0.0 : pow(cosLight, light.spotCutoffAndFalloff.y); + } + return spotAtten; +} + +vec2 +projectSphericalToLatLong(vec3 sample3D) +{ + // project spherical coord onto latitude-longitude map with + // latitude: +y == pi/2 and longitude: +z == 0, +x == pi/2 + const float PI = 3.1415; + vec2 coord = vec2((atan(sample3D.z, sample3D.x) + 0.5 * PI) / (2.0 * PI), + acos(sample3D.y) / PI); + return coord; +} + +float +schlickFresnel(float EdotH) +{ + return pow(max(0.0, 1.0 - EdotH), 5.0); +} + +LightingContribution +evaluateIndirectLight(vec3 Neye, vec3 Reye, float NdotE, float EdotH, + LightingInterfaceProperties props, int i) +{ + LightingContribution result; + result.diffuse = vec3(0); + result.specular = vec3(0); + result.ambient = vec3(0); + +#ifdef HD_HAS_domeLightIrradiance + const vec3 matSpecular = props.matSpecular; + // Assumes IoR = 1.5 + const vec3 F0 = mix(0.04 * matSpecular, matSpecular, props.metallic); + const vec3 F90 = mix(props.matSpecular, vec3(1), props.metallic); + const float fresnel = schlickFresnel(EdotH); + const vec3 F = mix(F0, F90, fresnel); + + mat4 transformationMatrix = + GetLightSource(i).worldToLightTransform * GetWorldToViewInverseMatrix(); + + // Diffuse Component + vec3 dir = normalize((transformationMatrix * vec4(Neye,0.0)).xyz); + vec2 coord = projectSphericalToLatLong(dir); + vec3 diffuse = HdGet_domeLightIrradiance(coord).rgb; + + // Specular Component + const float MAX_REFLECTION_LOD = + textureQueryLevels(HdGetSampler_domeLightPrefilter()); + const float roughness = props.roughness; + const float lod = roughness * MAX_REFLECTION_LOD; + vec3 Rdir = normalize((transformationMatrix * vec4(Reye,0.0)).xyz); + vec2 Rcoord = projectSphericalToLatLong(Rdir); + vec3 prefilter = HdTextureLod_domeLightPrefilter(Rcoord, lod).rgb; + vec2 brdf = HdGet_domeLightBRDF(vec2(NdotE, roughness)).rg; + vec3 specular = prefilter * (F * brdf.x + brdf.y); + + result.diffuse = diffuse; + result.specular = specular; +#else + result.diffuse = vec3(1, 1, 1); +#endif + + return result; +} + +// for the compatibility, turn on shadowing by default. +#ifndef USE_SHADOWS +#define USE_SHADOWS 1 +#endif + +LightingContribution +integrateLightsDefault(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + LightingContribution result; + result.ambient = vec3(0); + result.diffuse = vec3(0); + result.specular = vec3(0); + + vec3 n = normalize(Neye); + vec3 e = normalize(-Peye.xyz); + + float shininess = props.shininess; + + for (int i = 0; i < NUM_LIGHTS; ++i) { + LightSource light = GetLightSource(i); + + vec4 Plight = (light.isIndirectLight) + ? vec4(0,0,0,1) + : light.position; + vec3 l = (Plight.w == 0.0) + ? 
normalize(Plight.xyz) + : normalize(Plight - Peye).xyz; + vec3 h = normalize(l + vec3(0,0,1)); // directional viewer + + if (light.isIndirectLight) { + float NdotE = max(0.0, dot(n, e)); + float EdotH = max(0.0, dot(e, h)); + vec3 Reye = reflect(-e, n); + + LightingContribution indirectLight = evaluateIndirectLight( + Neye, Reye, NdotE, EdotH, props, i); + + result.ambient += light.ambient.rgb; + result.diffuse += indirectLight.diffuse * light.diffuse.rgb; + result.specular += indirectLight.specular * light.specular.rgb; + } else { + //cosine of incident angle of light + float NdotL = max(0.0, dot(n, l)); + + //cosine of incident angle from halfway vector between the eye and the light + float NdotH = max(0.0, dot(n, h)); + + //Lambert + float d = NdotL; + + //Blinn-Phong + float s = pow(NdotH, shininess); + + float atten = lightDistanceAttenuation(Peye, i); + atten *= lightSpotAttenuation(l, i); + +#if USE_SHADOWS + float shadow = light.hasShadow ? + shadowing(/*lightIndex=*/i, Peye) : 1.0; +#else + float shadow = 1.0; +#endif + result.ambient += atten * light.ambient.rgb; + result.diffuse += atten * shadow * d * light.diffuse.rgb; + result.specular += atten * shadow * s * light.specular.rgb; + } + } + + return result; +} + +LightingContribution +integrateLightsConstant(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + LightingContribution result; + result.ambient = vec3(0); + result.specular = vec3(0); + result.diffuse = vec3(1); + + return result; +} + +#endif // NUM_LIGHTS == 0 + +-- glsl SimpleLighting.SimpleLighting + +// --------------------------------------------------------------------------- +// lighting functions +// --------------------------------------------------------------------------- + +FORWARD_DECL(LightingContribution integrateLights(vec4 Peye, vec3 Neye, + LightingInterfaceProperties props)); + +vec4 +simpleLighting(vec4 color, vec4 Peye, vec3 Neye, vec4 Ctint, + vec4 matDiffuse, vec4 matAmbient, vec4 matSpecular, float matShininess) +{ + LightingInterfaceProperties props; + + // Get the roughness and metallic values + +#if defined(HD_HAS_displayMetallic) + const float metallic = max(0.0, min(1.0, float(HdGet_displayMetallic()))); +#else + const float metallic = 0.0; +#endif + props.metallic = metallic; + +#if defined(HD_HAS_displayRoughness) + const float roughness = max(0.0, min(1.0, float(HdGet_displayRoughness()))); + const float specularExp = (1.0 - roughness) * 120.0 + 8.0; + props.shininess = specularExp; + matSpecular.rgb = mix(vec3(1.0), matDiffuse.rgb, metallic); +#else + const float roughness = 0.0; + props.shininess = matShininess; +#endif + props.roughness = roughness; + props.matSpecular = matSpecular.rgb; + + LightingContribution light = integrateLights(Peye, Neye, props); + + // determine the specular and diffuse intensity + const float Ks = (1.0 - roughness) + 2.0 * metallic; + + color.rgb += light.ambient * matAmbient.rgb; + color.rgb += Ctint.rgb * light.diffuse * matDiffuse.rgb; + color.rgb += Ks * light.specular * matSpecular.rgb; + + color.a = matDiffuse.a; + + return color; +} + +vec4 +simpleLightingMaterial(vec4 color, vec4 Peye, vec3 Neye, vec4 Ctint) +{ + // XXX todo: useColorMaterialDiffuse + + vec4 diffuse = color; + vec4 ambient = material.ambient; + vec4 specular = material.specular; + float shininess = material.shininess; + + color = material.emission + (material.sceneColor * ambient); + + return simpleLighting(color, Peye, Neye, Ctint, + diffuse, ambient, specular, shininess); +} + +vec4 +simpleLighting(vec4 color, vec4 
Peye, vec3 Neye, vec4 Ctint) +{ + return simpleLightingMaterial(color, Peye, Neye, Ctint); +} diff --git a/blender/lib/usd/hd/resources/plugInfo.json b/blender/lib/usd/hd/resources/plugInfo.json new file mode 100644 index 0000000..60241d2 --- /dev/null +++ b/blender/lib/usd/hd/resources/plugInfo.json @@ -0,0 +1,12 @@ +{ + "Plugins": [ + { + "Info": {}, + "LibraryPath": "", + "Name": "hd", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/hdGp/resources/plugInfo.json b/blender/lib/usd/hdGp/resources/plugInfo.json new file mode 100644 index 0000000..80f8508 --- /dev/null +++ b/blender/lib/usd/hdGp/resources/plugInfo.json @@ -0,0 +1,24 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "HdGpSceneIndexPlugin": { + "bases": [ + "HdSceneIndexPlugin" + ], + "displayName": + "HdGpGenerativeProceduralResolvingSceneIndex", + "loadWithRenderer": "", + "priority": 0 + } + } + }, + "LibraryPath": "", + "Name": "hdGp", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/hdSt/resources/plugInfo.json b/blender/lib/usd/hdSt/resources/plugInfo.json new file mode 100644 index 0000000..4f836d6 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/plugInfo.json @@ -0,0 +1,52 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "HdSt_DependencySceneIndexPlugin" : { + "bases": ["HdSceneIndexPlugin"], + "loadWithRenderer" : "GL", + "priority": 0, + "displayName": "Storm Dependency Scene Index" + }, + "HdSt_DependencyForwardingSceneIndexPlugin" : { + "bases": ["HdSceneIndexPlugin"], + "loadWithRenderer" : "GL", + "priority": 0, + "displayName": "Storm Dependency Forwarding Scene Index" + }, + "HdSt_MaterialBindingResolvingSceneIndexPlugin" : { + "bases": ["HdSceneIndexPlugin"], + "loadWithRenderer" : "GL", + "priority": 0, + "displayName": "Scene Index to resolve material bindings." + }, + "HdSt_NodeIdentifierResolvingSceneIndexPlugin" : { + "bases": ["HdSceneIndexPlugin"], + "loadWithRenderer" : "GL", + "priority": 0, + "displayName": "Scene Index to resolve nodeIdentifier from glslfx sourceAsset." + }, + "HdSt_TerminalsResolvingSceneIndexPlugin" : { + "bases": ["HdSceneIndexPlugin"], + "loadWithRenderer" : "GL", + "priority": 0, + "displayName": "Scene Index to resolve terminal names." + }, + "HdSt_ImplicitSurfaceSceneIndexPlugin" : { + "bases": ["HdSceneIndexPlugin"], + "loadWithRenderer" : "GL", + "priority": 0, + "displayName": "Scene Index to turn implicit surfaces into prims suitable for Storm" + } + }, + "ShaderResources": "shaders" + }, + "LibraryPath": "", + "Name": "hdSt", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/hdSt/resources/shaders/basisCurves.glslfx b/blender/lib/usd/hdSt/resources/shaders/basisCurves.glslfx new file mode 100644 index 0000000..8cf4097 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/basisCurves.glslfx @@ -0,0 +1,1233 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. 
This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/basisCurves.glslfx + +#import $TOOLS/hdSt/shaders/instancing.glslfx +#import $TOOLS/hdSt/shaders/terminals.glslfx +#import $TOOLS/hdSt/shaders/pointId.glslfx +#import $TOOLS/hdSt/shaders/visibility.glslfx + +// Known issues: +// * The direction of the 'v' post tessellation is inconsistent between +// curve representations with regards to whether it increases from left to right +// or right to left. If we start using materials that require 'v', we should fix +// this to be both consistent and match the RenderMan default orientation. +// +// * RenderMan uses 'u' describe the parameter along curve profile and 'v' to +// describe the curve length. It's opposite here. It would be good to align +// these once we start to use 'u' and 'v' in curve materials. +// +// * We might want to explore using fractional_even_spacing to better preserve +// the shape of cubic curves. +// +// * We've realized that u appears to be 'backwards' in many cases, and so we +// have updated many of the functions to use +// mix(endPointValue, startPointValue, u) when intuitively it should be +// the other way around. 
+ +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonData + +struct Coeffs +{ + vec4 basis; + vec4 tangent_basis; +}; + +struct CurveData +{ + vec4 Peye[4]; + vec3 Neye[4]; +}; + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTess.CurveData + +CurveData PopulatePeyeAndNeye() +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + + CurveData vertexData; + for (int i = 0; i < HD_NUM_PATCH_VERTS; i++) { + vertexData.Peye[i] = vec4(GetWorldToViewMatrix() * transform * + vec4(HdGet_points(i), 1.0)); + vertexData.Neye[i] = getNormal(transpose(transformInv * + GetWorldToViewInverseMatrix()), i); + } + return vertexData; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Tess.CurveData.Patch + +CurveData PopulatePeyeAndNeye() +{ + CurveData vertexData; + for (int i = 0; i < gl_MaxPatchVertices; i++) { + vertexData.Peye[i] = inData[i].Peye; + vertexData.Neye[i] = inData[i].Neye; + } + return vertexData; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Tess.CurveData.Wire + +CurveData PopulatePeye() +{ + CurveData vertexData; + for (int i = 0; i < gl_MaxPatchVertices; i++) { + vertexData.Peye[i] = inData[i].Peye; + } + return vertexData; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessFactorsGLSL + +void SetTessFactors(float out0, float out1, float out2, float out3, + float in0, float in1) +{ + gl_TessLevelOuter[0] = out0; + gl_TessLevelOuter[1] = out1; + gl_TessLevelOuter[2] = out2; + gl_TessLevelOuter[3] = out3; + + gl_TessLevelInner[0] = in0; + gl_TessLevelInner[1] = in1; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessFactorsMSL + +void SetTessFactors(float out0, float out1, float out2, float out3, + float in0, float in1) +{ + device half *tessAsHalf = (device half *)tessFactors + patch_id * 6; + + tessAsHalf[0] = half(out0); + tessAsHalf[1] = half(out1); + tessAsHalf[2] = half(out2); + tessAsHalf[3] = half(out3); + + tessAsHalf[4] = half(in0); + tessAsHalf[5] = half(in1); +} + +--- -------------------------------------------------------------------------- +-- layout Curves.Vertex.Patch + +[ + ["out block", "CurveVertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.Vertex.Patch + +// We will either generate a camera facing normal or use the authored normal. +FORWARD_DECL(vec3 getNormal(MAT4 transform)); +// Fwd declare methods defined in pointId.glslfx, that are used below. 
+FORWARD_DECL(int GetPointId()); +FORWARD_DECL(float GetPointRasterSize(int)); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + + outData.Peye = vec4(GetWorldToViewMatrix() * transform * + vec4(HdGet_points(), 1)); + outData.Neye = getNormal(transpose(transformInv * + GetWorldToViewInverseMatrix())); + + ProcessPrimvarsIn(); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + int pointId = GetPointId(); +#if defined(HD_HAS_pointSizeScale) + float scale = HdGet_pointSizeScale(); +#else + float scale = 1; +#endif + gl_PointSize = GetPointRasterSize(pointId) * scale; + ProcessPointId(pointId); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Vertex.Normal.Implicit + +vec3 getNormal(MAT4 transform) +{ + // Generate a camera-facing normal in camera/eye space, designed to match + // RenderMan. + return vec3(0, 0, 1); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Vertex.Normal.Oriented + +vec3 getNormal(MAT4 transform) +{ + return (transform * vec4(HdGet_normals(), 0)).xyz; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTess.Normal.Implicit + +vec3 getNormal(MAT4 transform, int index) +{ + // Generate a camera-facing normal in camera/eye space, designed to match + // RenderMan. + return vec3(0, 0, 1); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTess.Normal.Oriented + +vec3 getNormal(MAT4 transform, int index) +{ + return (transform * vec4(HdGet_normals(index), 0)).xyz; +} + +--- -------------------------------------------------------------------------- +-- layout Curves.Vertex.Wire + +[ + ["out block", "CurveVertexData", "outData", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.Vertex.Wire + +// Fwd declare methods defined in pointId.glslfx, that are used below. 
+FORWARD_DECL(int GetPointId()); +FORWARD_DECL(float GetPointRasterSize(int)); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + outData.Peye = vec4(GetWorldToViewMatrix() * transform * + vec4(HdGet_points(), 1)); + + ProcessPrimvarsIn(); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + int pointId = GetPointId(); +#if defined(HD_HAS_pointSizeScale) + float scale = HdGet_pointSizeScale(); +#else + float scale = 1; +#endif + gl_PointSize = GetPointRasterSize(pointId) * scale; + ProcessPointId(pointId);} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonControl + +float GetMaxTess() +{ + // Should be replaced with a uniform + return 40; +} + +float GetPixelToTessRatio() +{ + // Should be replaced with a uniform + return 20.0; +} + +vec2 projectToScreen(MAT4 projMat, vec4 P, vec2 screen_size) +{ + vec4 res = vec4(projMat * P); + res /= res.w; + return (clamp(res.xy, -1.3f, 1.3f) + 1.0f) * (screen_size * 0.5f); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTessControl.Linear.Patch + +void main(void) +{ + CurveData vertexData = PopulatePeyeAndNeye(); + determineLODSettings(vertexData); +} + +--- -------------------------------------------------------------------------- +-- layout Curves.TessControl.Linear.Patch + +[ + ["out", "HD_NUM_PATCH_EVAL_VERTS"], + ["in block array", "CurveVertexData", "inData", "gl_MaxPatchVertices", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block array", "CurveVertexData", "outData", "HD_NUM_PATCH_EVAL_VERTS", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessControl.Linear.Patch + +void determineLODSettings(CurveData vertexData); +void main(void) +{ + if (gl_InvocationID == 0) { + CurveData vertexData = PopulatePeyeAndNeye(); + determineLODSettings(vertexData); + } + + outData[gl_InvocationID].Peye = inData[gl_InvocationID].Peye; + outData[gl_InvocationID].Neye = inData[gl_InvocationID].Neye; + + ProcessPrimvarsOut(); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonControl.Linear.Ribbon + +// Use the length of the control points in screen space to determine how many +// times to subdivide the curve. +void determineLODSettings(CurveData vertexData) +{ + SetTessFactors(1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonControl.Linear.HalfTube + +// Use the width of the control points in screen space to determine how +// many times to subdivide the curve. NOTE. As a quick hack, we leverage the +// fact that the normal isn't normalized at this point in the pipeline to +// provide a quick estimate of width in eye space. If that becomes a bad +// assumption in the future, this needs to be reworked. +void determineLODSettings(CurveData vertexData) +{ + MAT4 projMat = GetProjectionMatrix(); + vec4 viewport = GetViewport(); + vec2 screen_size = vec2(viewport.z, viewport.w); + + // NOTE. 
We've observed that outData.Neye is not normalized, and + // we're using its length as an estimator of the accumulated transform + float wEye0 = HdGet_widths(0) * length(vertexData.Neye[0]); + float wEye1 = HdGet_widths(1) * length(vertexData.Neye[1]); + + // project a point that is 'w' units away from the origin + vec2 v_w0 = projectToScreen(projMat, vec4(wEye0, 0, 0, 1), screen_size); + vec2 v_w1 = projectToScreen(projMat, vec4(wEye1, 0, 0, 1), screen_size); + + float maxTess = GetMaxTess(); + // reduce the tessellation in the width by this value. + float widthDecimation = 10.0; + + float maxWidthScreenSpace = max(length(v_w0), length(v_w1)); + + float level_w = clamp( + maxWidthScreenSpace / GetPixelToTessRatio() / widthDecimation, + 1.0f, maxTess); + + SetTessFactors(1.0f, level_w, 1.0f, level_w, level_w, 1.0f); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTessControl.Cubic.Wire +void main(void) +{ + MAT4 projMat = HdGet_projectionMatrix(); + vec4 viewport = HdGet_viewport(); + vec2 screen_size = vec2(viewport.z, viewport.w); + + CurveData vertexData = PopulatePeyeAndNeye(Peye, Neye); + vec2 v0 = projectToScreen(projMat, vertexData.Peye[0], screen_size); + vec2 v1 = projectToScreen(projMat, vertexData.Peye[1], screen_size); + vec2 v2 = projectToScreen(projMat, vertexData.Peye[2], screen_size); + vec2 v3 = projectToScreen(projMat, vertexData.Peye[3], screen_size); + + float maxTess = GetMaxTess(); + + // Need to handle off screen + float dist = distance(v0, v1) + distance(v1, v2) + distance(v2, v3); + float level = clamp(dist / GetPixelToTessRatio(), 0.0f, maxTess); + + SetTessFactors(1.0f, 1.0f, 1.0f, 1.0f, 1.0f, level); +} + +--- -------------------------------------------------------------------------- +-- layout Curves.TessControl.Cubic.Wire + +[ + ["out", "HD_NUM_PATCH_EVAL_VERTS"], + ["in block array", "CurveVertexData", "inData", "gl_MaxPatchVertices", + ["vec4", "Peye"] + ], + ["out block array", "CurveVertexData", "outData", "HD_NUM_PATCH_EVAL_VERTS", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessControl.Cubic.Wire + +void determineLODSettings(CurveData vertexData); +void main(void) +{ + if (gl_InvocationID == 0) { + CurveData vertexData = PopulatePeye(); + determineLODSettings(vertexData); + } + + outData[gl_InvocationID].Peye = inData[gl_InvocationID].Peye; + + ProcessPrimvarsOut(); +} + +// Use the length of the control points in screen space to determine how many +// times to subdivide the curve. 
+void determineLODSettings(CurveData vertexData) +{ + MAT4 projMat = GetProjectionMatrix(); + vec4 viewport = GetViewport(); + vec2 screen_size = vec2(viewport.z, viewport.w); + vec2 v0 = projectToScreen(projMat, vertexData.Peye[0], screen_size); + vec2 v1 = projectToScreen(projMat, vertexData.Peye[1], screen_size); + vec2 v2 = projectToScreen(projMat, vertexData.Peye[2], screen_size); + vec2 v3 = projectToScreen(projMat, vertexData.Peye[3], screen_size); + + float maxTess = GetMaxTess(); + + // Need to handle off screen + float dist = distance(v0, v1) + distance(v1, v2) + distance(v2, v3); + float level = clamp(dist / GetPixelToTessRatio(), 0.0f, maxTess); + + SetTessFactors(1.0f, level, 0.0f, 0.0f, 0.0f, 0.0f); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTessControl.Cubic.Patch + +void main(void) +{ + CurveData vertexData = PopulatePeyeAndNeye(); + determineLODSettings(vertexData); +} + +--- -------------------------------------------------------------------------- +-- layout Curves.TessControl.Cubic.Patch + +[ + ["out", "HD_NUM_PATCH_EVAL_VERTS"], + ["in block array", "CurveVertexData", "inData", "gl_MaxPatchVertices", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block array", "CurveVertexData", "outData", "HD_NUM_PATCH_EVAL_VERTS", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessControl.Cubic.Patch + +void determineLODSettings(CurveData vertexData); +void main(void) +{ + if (gl_InvocationID == 0) { + CurveData vertexData = PopulatePeyeAndNeye(); + determineLODSettings(vertexData); + } + + outData[gl_InvocationID].Peye = inData[gl_InvocationID].Peye; + outData[gl_InvocationID].Neye = inData[gl_InvocationID].Neye; + + ProcessPrimvarsOut(); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonControl.Cubic.Ribbon + +// Use the length of the control points in screen space to determine how many +// times to subdivide the curve. +void determineLODSettings(CurveData vertexData) +{ + MAT4 projMat = GetProjectionMatrix(); + vec4 viewport = GetViewport(); + vec2 screen_size = vec2(viewport.z, viewport.w); + vec2 v0 = projectToScreen(projMat, vertexData.Peye[0], screen_size); + vec2 v1 = projectToScreen(projMat, vertexData.Peye[1], screen_size); + vec2 v2 = projectToScreen(projMat, vertexData.Peye[2], screen_size); + vec2 v3 = projectToScreen(projMat, vertexData.Peye[3], screen_size); + + float maxTess = GetMaxTess(); + + // Need to handle off screen + float dist = distance(v0, v1) + distance(v1, v2) + distance(v2, v3); + float level = clamp(dist / GetPixelToTessRatio(), 0.0f, maxTess); + + SetTessFactors(level, 1.0f, level, 1.0f, 1.0f, level); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonControl.Cubic.HalfTube + +// Use the width & length of the control points in screen space to determine how +// many times to subdivide the curve. NOTE. As a quick hack, we leverage the +// fact that the normal isn't normalized at this point in the pipeline to +// provide a quick estimate of width in eye space. If that becomes a bad +// assumption in the future, this needs to be reworked. 
+void determineLODSettings(CurveData vertexData) +{ + MAT4 projMat = GetProjectionMatrix(); + vec4 viewport = GetViewport(); + vec2 screen_size = vec2(viewport.z, viewport.w); + vec2 v0 = projectToScreen(projMat, vertexData.Peye[0], screen_size); + vec2 v1 = projectToScreen(projMat, vertexData.Peye[1], screen_size); + vec2 v2 = projectToScreen(projMat, vertexData.Peye[2], screen_size); + vec2 v3 = projectToScreen(projMat, vertexData.Peye[3], screen_size); + + // NOTE. We've observed that outData.Neye is not normalized, and + // we're using its length as an estimator of the accumulated transform + float wEye0 = HdGet_widths(0) * length(vertexData.Neye[0]); + float wEye1 = HdGet_widths(1) * length(vertexData.Neye[1]); + float wEye2 = HdGet_widths(2) * length(vertexData.Neye[2]); + float wEye3 = HdGet_widths(3) * length(vertexData.Neye[3]); + + // project a point that is 'w' units away from the origin + vec2 v_w0 = projectToScreen(projMat, vec4(wEye0, 0, 0, 1), screen_size); + vec2 v_w1 = projectToScreen(projMat, vec4(wEye1, 0, 0, 1), screen_size); + vec2 v_w2 = projectToScreen(projMat, vec4(wEye2, 0, 0, 1), screen_size); + vec2 v_w3 = projectToScreen(projMat, vec4(wEye3, 0, 0, 1), screen_size); + + float maxTess = GetMaxTess(); + // reduce the tessellation in the width by this value. + float widthDecimation = 10.0; + + // Need to handle off screen + float dist = distance(v0, v1) + distance(v1, v2) + distance(v2, v3); + float level = clamp(dist / GetPixelToTessRatio(), 0.0f, maxTess); + + float maxWidthScreenSpace = + max(max(max(length(v_w0), length(v_w1)), length(v_w2)), length(v_w3)); + + float level_w = clamp( + maxWidthScreenSpace / GetPixelToTessRatio() / widthDecimation, + 1.0f, maxTess); + + SetTessFactors(level, level_w, level, level_w, level_w, level); +} + +--- -------------------------------------------------------------------------- +-- layout Curves.PostTessVertex.Cubic.Wire + +[ + ["in", "quads"], + ["in", "fractional_odd_spacing"], + ["in", "ccw"], + ["out block", "CurveVertexData", "outData", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTessVertex.Cubic.Wire + +void main(void) +{ + float u = gl_TessCoord.x; + float v = .5; + + CurveData vertexData = PopulatePeyeAndNeye(); + vec4 cv0 = vertexData.Peye[0]; + vec4 cv1 = vertexData.Peye[1]; + vec4 cv2 = vertexData.Peye[2]; + vec4 cv3 = vertexData.Peye[3]; + + Coeffs coeffs = evaluateBasis(u, u*u, u*u*u); + vec4 basis = coeffs.basis; + vec4 pos = basis[0] * cv0 + + basis[1] * cv1 + + basis[2] * cv2 + + basis[3] * cv3; + + outData.Peye = pos; + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(basis, 0, 1, 2, 3, vec2(u, v)); // interpolate varying primvars +} + +--- -------------------------------------------------------------------------- +-- layout Curves.TessEval.Cubic.Wire + +[ + ["in", "isolines"], + ["in", "fractional_odd_spacing"], + ["in block array", "CurveVertexData", "inData", "gl_MaxPatchVertices", + ["vec4", "Peye"] + ], + ["out block", "CurveVertexData", "outData", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessEval.Cubic.Wire + +FORWARD_DECL(Coeffs evaluateBasis(float u, float u2, float u3)); + +void main() +{ + float u = gl_TessCoord.x; + float v = .5; + + vec4 cv0 = inData[0].Peye; + vec4 cv1 = inData[1].Peye; + vec4 cv2 = inData[2].Peye; + vec4 cv3 = inData[3].Peye; + + Coeffs coeffs = 
evaluateBasis(u, u*u, u*u*u); + vec4 basis = coeffs.basis; + vec4 pos = basis[0] * cv0 + + basis[1] * cv1 + + basis[2] * cv2 + + basis[3] * cv3; + + outData.Peye = pos; + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(basis, 0, 1, 2, 3, vec2(u, v)); // interpolate varying primvars +} + +--- -------------------------------------------------------------------------- +-- layout Curves.PostTessVertex.Patch + +[ + ["in", "quads"], + ["in", "fractional_odd_spacing"], + ["in", "ccw"], + ["out block", "CurveVertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "float", "u"], + ["out", "float", "v"] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.PostTessVertex.Patch + +--- -------------------------------------------------------------------------- +-- layout Curves.TessEval.Patch + +[ + ["in", "quads"], + ["in", "fractional_odd_spacing"], + ["in", "ccw"], + ["in block array", "CurveVertexData", "inData", "gl_MaxPatchVertices", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block", "CurveVertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "float", "u"], + ["out", "float", "v"] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.TessEval.Patch + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonEval.Patch + +// Predefine so that we can later swap in the correct one depending +// on what type of curve we have + +FORWARD_DECL( + void evaluate(float u, float v, REF(thread, vec4) position, + REF(thread, vec4) tangent, REF(thread, float) width, + REF(thread, vec3) normal, CurveData vertexData)); +FORWARD_DECL(Coeffs evaluateBasis(float u, float u2, float u3)); + +// it's the responsibility of orient to store Neye, usually with either +// the computed normal or the tangent (from which the normal will be computed +// in the fragment shader.) +FORWARD_DECL(vec3 orient(float v, vec4 position, vec4 tangent, vec3 normal)); + +void main() +{ + u = gl_TessCoord.y; + v = gl_TessCoord.x; + + Coeffs coeffs = evaluateBasis(u, u*u, u*u*u); + vec4 basis = coeffs.basis; + + vec4 position; + vec4 tangent; + float rawWidth; + vec3 normal; + + CurveData vertexData = PopulatePeyeAndNeye(); + evaluate(u, v, position, tangent, rawWidth, normal, vertexData); + vec3 direction = orient(v, position, tangent, normal); + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + float worldSpaceWidth = rawWidth * length( + GetWorldToViewMatrix() * transform * vec4(direction, 0)); + + MAT4 projMat = GetProjectionMatrix(); + +#if defined(HD_HAS_screenSpaceWidths) || defined(HD_HAS_minScreenSpaceWidths) + + // If any screen space width operations are required, compute the + // conversion factor from world units to screen pixels at this curve tess + // position. Critically, this procedure does not rely on the thickening + // 'direction' vector, which may point out of the image plane and have + // zero apparent screen-space length in some circumstances. + // + // This procedure is correct for both perspective and ortho cameras. It is a + // boiled-down x-only expression of the projected pixel length of a + // hypothetical unit X vector in eye space, and can be derived by writing a + // projection matrix transforming (1,0,0,1) and performing the usual + // division by w. Since the viewport is 2 NDC units across, we take half the + // viewportSizeX. 
The division is by -position.z for perspective projections + // and by 1 for ortho projections, using entries 2,3 and 3,3 to select + // which. See articles on the forms of these projection matrices for more + // info. + float x = projMat[0][0]; + float w = position.z * projMat[2][3] + projMat[3][3]; + float viewportSizeX = GetViewport().z; + float worldToPixelWidth = abs((viewportSizeX * 0.5) * (x / w)); + +#ifdef HD_HAS_screenSpaceWidths + if (HdGet_screenSpaceWidths()) { + // Compute a world space width that yields the given width interpreted + // in screen space pixels. + worldSpaceWidth = rawWidth / worldToPixelWidth; + } +#endif + +#ifdef HD_HAS_minScreenSpaceWidths + // Compute a world space width that yields, at minimum, the given + // minScreenSpaceWidth interpreted in screen space pixels. + float minScreenSpaceWidth = HdGet_minScreenSpaceWidths(); + float screenSpaceWidth = worldSpaceWidth * worldToPixelWidth; + if (screenSpaceWidth < minScreenSpaceWidth) { + worldSpaceWidth *= minScreenSpaceWidth / screenSpaceWidth; + } +#endif + +#endif // end screen space operations + + vec3 offset = direction * worldSpaceWidth * 0.5; + position.xyz = position.xyz + offset; + position.w = 1; + + outData.Peye = position; + + gl_Position = vec4(projMat * outData.Peye); + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(basis, 0, 1, 2, 3, vec2(u, v)); // interpolate varying primvars +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonEval.Linear.Patch + +vec3 evaluateNormal(float u, CurveData vertexData) +{ + // XXX: This clamp is a hack to mask some odd orientation flipping issues + u = clamp(u, 1e-3f, 1.0f - 1e-3f); + return mix(vertexData.Neye[1], vertexData.Neye[0], u); +} + +void evaluate(float u, float v, REF(thread, vec4) position, + REF(thread, vec4) tangent, REF(thread, float) width, + REF(thread, vec3) normal, CurveData vertexData) { + vec4 p0 = vertexData.Peye[0]; + vec4 p1 = vertexData.Peye[1]; + + float w0 = HdGet_widths(0); + float w1 = HdGet_widths(1); + + position = mix(p1, p0, u); + tangent = normalize(p1 - p0); + width = mix(w1, w0, u); + normal = normalize(evaluateNormal(u, vertexData)); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonEval.Cubic.Patch + +FORWARD_DECL(Coeffs evaluateBasis(float u, float u2, float u3)); +FORWARD_DECL(float evaluateWidths(vec4 basis, float u)); +FORWARD_DECL(vec3 evaluateNormal(vec4 basis, float u, CurveData vertexData)); + +void evaluate(float u, float v, REF(thread, vec4) position, + REF(thread, vec4) tangent, REF(thread, float) width, + REF(thread, vec3) normal, CurveData vertexData) { + vec4 p0 = vertexData.Peye[0]; + vec4 p1 = vertexData.Peye[1]; + vec4 p2 = vertexData.Peye[2]; + vec4 p3 = vertexData.Peye[3]; + + Coeffs coeffs = evaluateBasis(u, u*u, u*u*u); + + position = coeffs.basis[0] * p0 + + coeffs.basis[1] * p1 + + coeffs.basis[2] * p2 + + coeffs.basis[3] * p3; + + tangent = coeffs.tangent_basis[0] * p0 + + coeffs.tangent_basis[1] * p1 + + coeffs.tangent_basis[2] * p2 + + coeffs.tangent_basis[3] * p3; + + width = evaluateWidths(coeffs.basis, u); + normal = normalize(evaluateNormal(coeffs.basis, u, vertexData)); + tangent = normalize(tangent); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonEval.HalfTube + +vec3 orient(float v, vec4 position, vec4 tangent, vec3 normal){ + outData.Neye = tangent.xyz; + vec3 d = normalize(cross(position.xyz, tangent.xyz)); + vec3 
n = normalize(cross(d, tangent.xyz)); + + vec3 norm_pos = mix(n, d, (2.0*v) - 1.0); + vec3 norm_neg = mix(-d, n, (2.0*v)); + normal = normalize(mix(norm_neg, norm_pos, step(0.5, v))); + return normal; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonEval.Ribbon.Oriented + +vec3 orient(float v, vec4 position, vec4 tangent, vec3 normal){ + outData.Neye = normal; + return normalize(cross(tangent.xyz, normal) * (v - 0.5)); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CommonEval.Ribbon.Implicit + +vec3 orient(float v, vec4 position, vec4 tangent, vec3 normal){ + outData.Neye = tangent.xyz; + // NOTE: lava/lib/basisCurves currently uses tangent X position instead of + // tangent X normal. We should do a more thorough evaluation to see which + // is better but to minimize regressions, we're going to keep this as + // tangent X normal for now. + return normalize(cross(tangent.xyz, normal) * (v - 0.5)); +} + + +--- -------------------------------------------------------------------------- +-- glsl Curves.Cubic.Normals.Basis + +vec3 evaluateNormal(vec4 basis, float u, CurveData vertexData) +{ + vec3 n0 = vertexData.Neye[0]; + vec3 n1 = vertexData.Neye[1]; + vec3 n2 = vertexData.Neye[2]; + vec3 n3 = vertexData.Neye[3]; + return n0 * basis.x + + n1 * basis.y + + n2 * basis.z + + n3 * basis.w; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Cubic.Normals.Linear + +// HdSt only supports vertex (cubic) primvar indexes and expands varying +// (linear) primvars so we pull the data out of only the two interior indices. +// This may not be valid for all potential basis, but works well for curves with +// vstep = 1 and bezier, the only supported cubic curves in HdSt. + +vec3 evaluateNormal(vec4 basis, float u, CurveData vertexData) +{ + // XXX: This clamp is a hack to mask some odd orientation flipping issues + // for oriented bezier curves. + u = clamp(u, 1e-3f, 1.0f - 1e-3f); + return mix(vertexData.Neye[2], vertexData.Neye[1], u); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Cubic.Widths.Basis + +float evaluateWidths(vec4 basis, float u) +{ + float w0 = HdGet_widths(0); + float w1 = HdGet_widths(1); + float w2 = HdGet_widths(2); + float w3 = HdGet_widths(3); + return w0 * basis.x + + w1 * basis.y + + w2 * basis.z + + w3 * basis.w; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Cubic.Widths.Linear + +// HdSt only supports vertex (cubic) primvar indexes and expands varying +// (linear) primvars so we pull the data out of only the two interior indices. +// (ie. w0 -> widths[1], w1 -> widths[2]) +// This may not be valid for all potential basis, but works well for curves with +// vstep = 1 and bezier, the only supported cubic curves in HdSt. 
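+//
+// Example (hypothetical values): with a varying width of {0.2, 0.6}
+// authored on a single cubic segment, the expansion to vertex rate leaves
+// the authored values at the two interior slots, so HdGet_widths(1) == 0.2
+// and HdGet_widths(2) == 0.6, and evaluateWidths below simply lerps
+// between them by u instead of applying the full basis weights used by
+// the vertex-rate variant above.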
+float evaluateWidths(vec4 basis, float u) +{ + float w0 = HdGet_widths(1); + float w1 = HdGet_widths(2); + return mix(w1, w0, u); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Linear.VaryingInterpolation + +float InterpolatePrimvar(float inPv0, float inPv1, float inPv2, float inPv3, + vec4 basis, vec2 uv) +{ + return inPv0 * basis.x + + inPv1 * basis.y + + inPv2 * basis.z + + inPv3 * basis.w; +} + +vec2 InterpolatePrimvar(vec2 inPv0, vec2 inPv1, vec2 inPv2, vec2 inPv3, + vec4 basis, vec2 uv) +{ + return inPv0 * basis.x + + inPv1 * basis.y + + inPv2 * basis.z + + inPv3 * basis.w; +} + +vec3 InterpolatePrimvar(vec3 inPv0, vec3 inPv1, vec3 inPv2, vec3 inPv3, + vec4 basis, vec2 uv) +{ + return inPv0 * basis.x + + inPv1 * basis.y + + inPv2 * basis.z + + inPv3 * basis.w; +} + +vec4 InterpolatePrimvar(vec4 inPv0, vec4 inPv1, vec4 inPv2, vec4 inPv3, + vec4 basis, vec2 uv) +{ + return inPv0 * basis.x + + inPv1 * basis.y + + inPv2 * basis.z + + inPv3 * basis.w; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Cubic.VaryingInterpolation + +float InterpolatePrimvar(float inPv0, float inPv1, float inPv2, float inPv3, + vec4 basis, vec2 uv) +{ + return mix(inPv2, inPv1, uv.x); +} + +vec2 InterpolatePrimvar(vec2 inPv0, vec2 inPv1, vec2 inPv2, vec2 inPv3, + vec4 basis, vec2 uv) +{ + return mix(inPv2, inPv1, uv.x); +} + +vec3 InterpolatePrimvar(vec3 inPv0, vec3 inPv1, vec3 inPv2, vec3 inPv3, + vec4 basis, vec2 uv) +{ + return mix(inPv2, inPv1, uv.x); +} + +vec4 InterpolatePrimvar(vec4 inPv0, vec4 inPv1, vec4 inPv2, vec4 inPv3, + vec4 basis, vec2 uv) +{ + return mix(inPv2, inPv1, uv.x); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.BezierBasis + +Coeffs evaluateBasis(float u, float u2, float u3) +{ + vec4 basis; vec4 tangent_basis; + basis[0] = u3; + basis[1] = -3.0*u3 + 3.0*u2; + basis[2] = 3.0*u3 - 6.0*u2 + 3.0*u; + basis[3] = -1.0*u3 + 3.0*u2 - 3.0*u + 1.0; + + tangent_basis[0] = 3.0*u2; + tangent_basis[1] = -9.0*u2 + 6.0*u; + tangent_basis[2] = 9.0*u2 - 12.0*u + 3.0; + tangent_basis[3] = -3.0*u2 + 6.0*u - 3.0; + + Coeffs coeffs = { basis, tangent_basis }; + return coeffs; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.LinearBasis + +Coeffs evaluateBasis(float u, float u2, float u3) +{ + vec4 basis; vec4 tangent_basis; + basis[0] = u; + basis[1] = 1.0 - u; + basis[2] = 0; + basis[3] = 0.0; + + tangent_basis[0] = 1; + tangent_basis[1] = -1; + tangent_basis[2] = 0; + tangent_basis[3] = 0; + + Coeffs coeffs = { basis, tangent_basis }; + return coeffs; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.CatmullRomBasis + +Coeffs evaluateBasis(float u, float u2, float u3) +{ + vec4 basis; vec4 tangent_basis; + basis[0] = 0.5*u3 - 0.5*u2; + basis[1] = -1.5*u3 + 2.0*u2 + 0.5*u; + basis[2] = 1.5*u3 - 2.5*u2 + 1.0; + basis[3] = -0.5*u3 + u2 - 0.5*u; + + tangent_basis[0] = 1.5*u2 - u; + tangent_basis[1] = -4.5*u2 + 4.0*u + 0.5; + tangent_basis[2] = 4.5*u2 - 5.0*u; + tangent_basis[3] = -1.5*u2 + 2.0*u - 0.5; + + Coeffs coeffs = { basis, tangent_basis }; + return coeffs; +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.BsplineBasis + +Coeffs evaluateBasis(float u, float u2, float u3) +{ + vec4 basis; vec4 tangent_basis; + basis[0] = (1.0/6.0)*u3; + basis[1] = -0.5*u3 + 0.5*u2 + 0.5*u + (1.0/6.0); + 
basis[2] = 0.5*u3 - u2 + (2.0/3.0); + basis[3] = -(1.0/6.0)*u3 + 0.5*u2 - 0.5*u + (1.0/6.0); + + tangent_basis[0] = 0.5*u2; + tangent_basis[1] = -1.5*u2 + u + 0.5; + tangent_basis[2] = 1.5*u2 - 2.0*u; + tangent_basis[3] = -0.5*u2 + u - 0.5; + + Coeffs coeffs = { basis, tangent_basis }; + return coeffs; +} + +--- -------------------------------------------------------------------------- +-- layout Curves.Fragment.Wire + +[ + ["in block", "CurveVertexData", "inData", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.Fragment.Wire + +void main(void) +{ + DiscardBasedOnTopologicalVisibility(); + + vec4 color = vec4(0.5, 0.5, 0.5, 1); +#ifdef HD_HAS_displayColor + color.rgb = HdGet_displayColor().rgb; +#endif +#ifdef HD_HAS_displayOpacity + color.a = HdGet_displayOpacity(); +#endif + color.rgb = ApplyColorOverrides(color).rgb; + + vec3 Peye = inData.Peye.xyz / inData.Peye.w; + + // We would like to have a better oriented normal here, however to keep the + // shader fast, we use this camera-facing approximation. + vec3 Neye = vec3(0,0,1); + + vec4 patchCoord = vec4(0); + + color.rgb = mix(color.rgb, + ShadingTerminal(vec4(Peye, 1), Neye, color, patchCoord).rgb, + GetLightingBlendAmount()); + +#ifdef HD_MATERIAL_TAG_MASKED + if (ShouldDiscardByAlpha(color)) { + discard; + } +#endif + + RenderOutput(vec4(Peye, 1), Neye, color, patchCoord); +} + +--- -------------------------------------------------------------------------- +-- layout Curves.Fragment.Patch + +[ + ["in", "float", "u", "centroid"], + ["in", "float", "v", "centroid"], + ["in block", "CurveVertexData", "inData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Curves.Fragment.Patch + +/// In the previous stage, we may have stored the tangent in Neye from which +/// we plan to compute a normal in the fragment shader. 
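+///
+/// As a usage sketch, main() below consumes that contract via
+///
+///     vec3 Neye = fragmentNormal(Peye, inData.Neye, v);
+///
+/// where each fragmentNormal variant further down decides whether to treat
+/// the incoming vector as a tangent (HalfTube, Ribbon.Round), as an
+/// authored normal (Ribbon.Oriented), or to ignore it entirely (Hair).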
+ +FORWARD_DECL(vec3 fragmentNormal(vec3 position, vec3 normal, float v)); +void main(void) +{ + DiscardBasedOnTopologicalVisibility(); + + vec4 color = vec4(0.5, 0.5, 0.5, 1); +#ifdef HD_HAS_displayColor + color.rgb = HdGet_displayColor().rgb; +#endif +#ifdef HD_HAS_displayOpacity + color.a = HdGet_displayOpacity(); +#endif + color.rgb = ApplyColorOverrides(color).rgb; + + vec3 Peye = inData.Peye.xyz / inData.Peye.w; + + vec3 Neye = fragmentNormal(Peye, inData.Neye, v); + + vec4 patchCoord = vec4(0); + color.rgb = mix(color.rgb, + ShadingTerminal(vec4(Peye, 1), Neye, color, patchCoord).rgb, + GetLightingBlendAmount()); + +#ifdef HD_MATERIAL_TAG_MASKED + if (ShouldDiscardByAlpha(color)) { + discard; + } +#endif + + RenderOutput(vec4(Peye, 1), Neye, color, patchCoord); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Fragment.HalfTube + +vec3 fragmentNormal(in vec3 position, in vec3 tangent, in float v) +{ + vec3 d = normalize(cross(position, tangent)); + vec3 n = normalize(cross(d, tangent)); + vec3 norm_pos = mix(n, d, (2.0*v) - 1.0); + vec3 norm_neg = mix(-d, n, (2.0*v)); + return normalize(mix(norm_neg, norm_pos, step(0.5, v))); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Fragment.Ribbon.Round + +float remapFragmentV(float v){ + // As we are using a plane to approximate a tube, we don't want to shade + // based on v but rather the projection of the tube's v onto the plane + return clamp((asin(v * 2.0 - 1.0) / (3.146 / 2.0) + 1.0) / 2.0, 0.0, 1.0); +} + +vec3 fragmentNormal(vec3 position, in vec3 tangent, float v) +{ + + // we slightly bias v towards 0.5 based on filterwidth as a hack to + // minimize aliasing + v = mix(remapFragmentV(v), 0.5, min(fwidth(v), .2)); + + vec3 d = normalize(cross(position, tangent)); + vec3 n = normalize(cross(d, tangent)); + vec3 norm_pos = mix(n, d, (2.0*v) - 1.0); + vec3 norm_neg = mix(-d, n, (2.0*v)); + + return normalize(mix(norm_neg, norm_pos, step(0.5, v))); +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Fragment.Ribbon.Oriented + +vec3 fragmentNormal(vec3 position, in vec3 normal, float v) +{ + normal = normalize(normal); + if (gl_FrontFacing){ + return normal; + } + else{ + return -normal; + } +} + +--- -------------------------------------------------------------------------- +-- glsl Curves.Fragment.Hair + +// XXX: Neye is interpolated in from previous stages, however the +// polarity is not stable due to instability in the cross-product in the +// TessEval shader. Once that is fixed, we could use Neye directly here. +// The normal computed here results in faceted shading. +// +vec3 fragmentNormal(vec3 position, in vec3 unused, float v) +{ + return cross(dFdx(position), dFdy(position)); +} diff --git a/blender/lib/usd/hdSt/resources/shaders/compute.glslfx b/blender/lib/usd/hdSt/resources/shaders/compute.glslfx new file mode 100644 index 0000000..06ba932 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/compute.glslfx @@ -0,0 +1,456 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. 
This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/compute.glslfx + +-- configuration +{ + "techniques": { + "default": { + "smoothNormalsFloatToFloat": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstFloat", + "Compute.SmoothNormals" ] + }, + "smoothNormalsDoubleToDouble": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstDouble", + "Compute.SmoothNormals" ] + }, + "smoothNormalsFloatToPacked": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstPacked", + "Compute.SmoothNormals" ] + }, + "smoothNormalsDoubleToPacked": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstPacked", + "Compute.SmoothNormals" ] + }, + "flatNormalsTriFloatToFloat": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstFloat", + "Compute.FlatNormals", + "Compute.FlatNormalsTri" ] + }, + "flatNormalsTriDoubleToDouble": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstDouble", + "Compute.FlatNormals", + "Compute.FlatNormalsTri" ] + }, + "flatNormalsTriFloatToPacked": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstPacked", + "Compute.FlatNormals", + "Compute.FlatNormalsTri" ] + }, + "flatNormalsTriDoubleToPacked": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstPacked", + "Compute.FlatNormals", + "Compute.FlatNormalsTri" ] + }, + "flatNormalsQuadFloatToFloat": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstFloat", + "Compute.FlatNormals", + "Compute.FlatNormalsQuad" ] + }, + "flatNormalsQuadDoubleToDouble": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstDouble", + "Compute.FlatNormals", + "Compute.FlatNormalsQuad" ] + }, + "flatNormalsQuadFloatToPacked": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstPacked", + "Compute.FlatNormals", + "Compute.FlatNormalsQuad" ] + }, + "flatNormalsQuadDoubleToPacked": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstPacked", + "Compute.FlatNormals", + "Compute.FlatNormalsQuad" ] + }, + "flatNormalsTriQuadFloatToFloat": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstFloat", + "Compute.FlatNormals", + "Compute.FlatNormalsTriQuad" ] + }, + "flatNormalsTriQuadDoubleToDouble": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstDouble", + "Compute.FlatNormals", + "Compute.FlatNormalsTriQuad" ] + }, + "flatNormalsTriQuadFloatToPacked": { + "source": [ "Compute.NormalsSrcFloat", + "Compute.NormalsDstPacked", + "Compute.FlatNormals", + "Compute.FlatNormalsTriQuad" ] + }, + "flatNormalsTriQuadDoubleToPacked": { + "source": [ "Compute.NormalsSrcDouble", + "Compute.NormalsDstPacked", + "Compute.FlatNormals", + "Compute.FlatNormalsTriQuad" ] + }, + "quadrangulateFloat": { + "source": [ 
"Compute.QuadrangulateFloat", + "Compute.Quadrangulate" ] + }, + "quadrangulateDouble": { + "source": [ "Compute.QuadrangulateDouble", + "Compute.Quadrangulate" ] + }, + "evalStencils": { + "source": [ "Compute.EvalStencils" ] + } + } + } +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.NormalsSrcFloat + +vec3 getPoint(int index) +{ + return vec3(points[index], + points[index + 1], + points[index + 2]); +} +--- -------------------------------------------------------------------------- +-- glsl Compute.NormalsSrcDouble + +vec3 getPoint(int index) +{ + return vec3(points[index], + points[index + 1], + points[index + 2]); +} +--- -------------------------------------------------------------------------- +-- glsl Compute.NormalsDstFloat + +void writeNormal(int nIndex, vec3 normal) +{ + normals[nIndex+0] = normal.x; + normals[nIndex+1] = normal.y; + normals[nIndex+2] = normal.z; +} +--- -------------------------------------------------------------------------- +-- glsl Compute.NormalsDstDouble + +void writeNormal(int nIndex, vec3 normal) +{ + normals[nIndex+0] = normal.x; + normals[nIndex+1] = normal.y; + normals[nIndex+2] = normal.z; +} +--- -------------------------------------------------------------------------- +-- glsl Compute.NormalsDstPacked + +void writeNormal(int nIndex, vec3 normal) +{ + normal *= 511.0; + normals[nIndex] = + ((int(normal.x) & 0x3ff) ) | + ((int(normal.y) & 0x3ff) << 10) | + ((int(normal.z) & 0x3ff) << 20); +} +--- -------------------------------------------------------------------------- +-- glsl Compute.SmoothNormals + +int getPointsIndex(int idx) +{ + return (idx+vertexOffset)*pointsStride + pointsOffset; +} + +int getNormalsIndex(int idx) +{ + return (idx+vertexOffset)*normalsStride + normalsOffset; +} + +void main() +{ + int index = int(hd_GlobalInvocationID.x); + if (index >= indexEnd) { + return; + } + + int offIndex = index * 2 + adjacencyOffset; + + int offset = entry[offIndex] + adjacencyOffset; + int valence = entry[offIndex + 1]; + + vec3 normal = vec3(0); + + vec3 current = getPoint(getPointsIndex(index)); + for (int i = 0; i < valence; ++i) { + int entryIdx = i * 2 + offset; + + int prevIdx = entry[entryIdx]; + int nextIdx = entry[entryIdx + 1]; + + vec3 next = getPoint(getPointsIndex(nextIdx)); + vec3 prev = getPoint(getPointsIndex(prevIdx)); + normal += cross(next - current, prev - current); + + } + float n = 1.0/max(length(normal), 0.000001); + normal *= n; + writeNormal(getNormalsIndex(index), normal); +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.FlatNormals + +int getPointsIndex(int idx) +{ + return (idx+vertexOffset)*pointsStride + pointsOffset; +} + +int getNormalsIndex(int idx) +{ + return (idx+elementOffset)*normalsStride + normalsOffset; +} + +int getIndicesIndex(int idx) +{ + return (idx+topologyOffset)*indexStride + indexOffset; +} + +int getPrimitiveParamIndex(int idx) +{ + return (idx+topologyOffset)*pParamStride + pParamOffset; +} + +int getEdgeFlag(int pParam) +{ + return pParam & 3; +} + +int getFaceIndex(int pParam) +{ + return pParam >> 2; +} + +FORWARD_DECL(vec3 computeNormalForPrimIndex(int primIndex)); + +void main() +{ + int primIndex = int(hd_GlobalInvocationID.x); + if (primIndex >= primIndexEnd) { + return; + } + + int pParam = primitiveParam[getPrimitiveParamIndex(primIndex)]; + int edgeFlag = getEdgeFlag(pParam); + int faceIndex = getFaceIndex(pParam); + vec3 normal = vec3(0); + + if (getEdgeFlag(pParam) == 0) { + // 
0 indicates an unsplit face (as authored) + normal += computeNormalForPrimIndex(primIndex); + + } else if (getEdgeFlag(pParam) == 1) { + // A subdivided face will have a run of prims with + // edge flags like: 1, 3, 3, 3, 2; where "3" denotes an interior + // prim. Only compute normals for the first prim in a face. + + int primCounter = 0; + do { + pParam = primitiveParam[getPrimitiveParamIndex( + primIndex+primCounter)]; + normal += computeNormalForPrimIndex(primIndex+primCounter); + primCounter++; + } while(getEdgeFlag(pParam) != 2); + + } else { + return; + } + float n = 1.0/max(length(normal), 0.000001); + normal *= n; + writeNormal(getNormalsIndex(faceIndex), normal); +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.FlatNormalsTri + +ivec3 getIndices(int idx) +{ + return ivec3(indices[idx], + indices[idx+1], + indices[idx+2]); +} + +vec3 computeNormalForPrimIndex(int primIndex) +{ + ivec3 indices = getIndices(getIndicesIndex(primIndex)); + + vec3 p0 = getPoint(getPointsIndex(indices.x)); + vec3 p1 = getPoint(getPointsIndex(indices.y)); + vec3 p2 = getPoint(getPointsIndex(indices.z)); + + return cross(p1-p0, p2-p0); +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.FlatNormalsQuad + +ivec4 getIndices(int idx) +{ + return ivec4(indices[idx], + indices[idx+1], + indices[idx+2], + indices[idx+3]); +} + +vec3 computeNormalForPrimIndex(int primIndex) +{ + ivec4 indices = getIndices(getIndicesIndex(primIndex)); + + vec3 p0 = getPoint(getPointsIndex(indices.x)); + vec3 p1 = getPoint(getPointsIndex(indices.y)); + vec3 p2 = getPoint(getPointsIndex(indices.z)); + vec3 p3 = getPoint(getPointsIndex(indices.w)); + + return cross(p0-p3, p2-p3) + cross(p2-p1, p0-p1); +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.FlatNormalsTriQuad + +ivec4 getIndices(int idx) +{ + return ivec4(indices[idx], + indices[idx+1], + indices[idx+2], + indices[idx+4]); +} + +vec3 computeNormalForPrimIndex(int primIndex) +{ + ivec4 indices = getIndices(getIndicesIndex(primIndex)); + + vec3 p0 = getPoint(getPointsIndex(indices.x)); + vec3 p1 = getPoint(getPointsIndex(indices.y)); + vec3 p2 = getPoint(getPointsIndex(indices.z)); + vec3 p3 = getPoint(getPointsIndex(indices.w)); + + return cross(p0-p3, p2-p3) + cross(p2-p1, p0-p1); +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.QuadrangulateFloat + +#define DATATYPE float + +--- -------------------------------------------------------------------------- +-- glsl Compute.QuadrangulateDouble + +#define DATATYPE double + +--- -------------------------------------------------------------------------- +-- glsl Compute.Quadrangulate + +void main() +{ + int index = int(hd_GlobalInvocationID.x); + if (index >= indexEnd) { + return; + } + + int quadInfoIndex = index * quadInfoStride + quadInfoOffset; + int numVert = quadInfo[quadInfoIndex]; + int dstOffset = quadInfo[quadInfoIndex+1]; + + // GPU quadinfo table layout + // + // struct NonQuad { + // int numVert; + // int dstOffset; + // int index[maxNumVert]; + // } quadInfo[] + // + + for (int j = 0; j < numComponents; ++j) { + DATATYPE center = 0; + for (int i = 0; i < numVert; ++i) { + int i0 = quadInfo[quadInfoIndex + 2 + i]; + int i1 = quadInfo[quadInfoIndex + 2 + (i+1)%numVert]; + + DATATYPE v0 = primvar[(i0 + vertexOffset)*primvarStride + primvarOffset + j]; + DATATYPE v1 = primvar[(i1 + vertexOffset)*primvarStride + 
primvarOffset + j]; + DATATYPE edge = (v0 + v1) * 0.5; + center += v0; + + // edge + primvar[(dstOffset + i + vertexOffset)*primvarStride + primvarOffset + j] = edge; + } + // center + center /= numVert; + primvar[(dstOffset + numVert + vertexOffset)*primvarStride + primvarOffset + j] = center; + } +} + +--- -------------------------------------------------------------------------- +-- glsl Compute.EvalStencils + +void main() +{ + const int pointIndex = pointIndexStart + int(hd_GlobalInvocationID.x); + if (pointIndex >= pointIndexEnd) { + return; + } + + const int numElements = EVAL_STENCILS_NUM_ELEMENTS; + float result[numElements]; + for (int element = 0; element < numElements; ++element) { + result[element] = 0; + } + + const int stencilSize = sizes[pointIndex + sizesBase]; + const int stencilOffset = offsets[pointIndex + offsetsBase]; + + for (int stencil = 0; stencil < stencilSize; ++stencil) { + const int index = indices[stencil + stencilOffset + indicesBase]; + const float weight = weights[stencil + stencilOffset + weightsBase]; + const int srcIndex = (index + srcBase) * srcStride; + for (int element = 0; element < numElements; ++element) { + result[element] += weight * primvar[srcIndex + element]; + } + } + + const int dstIndex = (pointIndex + dstBase) * dstStride; + for (int element = 0; element < numElements; ++element) { + primvar[dstIndex + element] = result[element]; + } +} diff --git a/blender/lib/usd/hdSt/resources/shaders/domeLight.glslfx b/blender/lib/usd/hdSt/resources/shaders/domeLight.glslfx new file mode 100644 index 0000000..486b03d --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/domeLight.glslfx @@ -0,0 +1,275 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// + +-- configuration +{ + "techniques": { + "default": { + "domeLightIrradiance": { + "source": [ "DomeLight.Common", + "DomeLight.Irradiance" ] + }, + "domeLightPrefilter": { + "source": [ "DomeLight.Common", + "DomeLight.CommonSampling", + "DomeLight.Prefilter" ] + }, + "domeLightBRDF": { + "source": [ "DomeLight.Common", + "DomeLight.CommonSampling", + "DomeLight.BRDF" ] + } + } + } +} + +--- -------------------------------------------------------------------------- +-- glsl DomeLight.Common + +const float PI = 3.1415926536; + +// compute texture coords based on the size of the output image/texture +vec2 GetTexCoords(ivec2 outCoords) +{ + vec2 outDims = vec2(HgiGetSize_outTexture()); + // apply a (0.5, 0.5) offset to use pixel centers and not pixel corners + vec2 texCoords = (vec2(outCoords) + vec2(0.5, 0.5)) / outDims; + return texCoords; +} + +// sample lat/long env map input texture +vec3 SampleEnvMapLod(vec3 sampleVec, float sampleLod) { + vec2 coord = vec2((atan(sampleVec.z, sampleVec.x) + PI) / (2.0 * PI), + acos(sampleVec.y) / PI); + return HgiTextureLod_inTexture(coord, sampleLod).rgb; +} + +// compute world position from texture coords +vec3 GetWorldPos(vec2 textureCoord) +{ + // have theta range from [-PI, PI] so the origin is in the center + // of the image + float theta = (textureCoord.x * 2.0 * PI) - PI; + float phi = (textureCoord.y * PI); + float x = cos(theta) * sin(phi); + float y = cos(phi); + float z = sin(theta) * sin(phi); + return vec3(x, y, z); +} + +--- -------------------------------------------------------------------------- +-- glsl DomeLight.CommonSampling + +float RadicalInverse(uint a) +{ + return float(bitfieldReverse(a)) * 2.3283064365386963e-10; // 0x1p-32 +} + +vec2 Hammersley2d(uint a, uint N) +{ + return vec2(float(a) / float(N), RadicalInverse(a)); +} + +vec3 ImportanceSample_GGX(vec2 Xi, float roughness, vec3 normal) +{ + // Maps a 2D point to a hemisphere with spread based on roughness + float alpha = roughness * roughness; + float phi = 2.0 * PI * Xi.x; + float cosTheta = sqrt((1.0 - Xi.y) / (1.0 + (alpha*alpha - 1.0) * Xi.y)); + float sinTheta = sqrt(1.0 - cosTheta * cosTheta); + vec3 H = vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta); + + // Tangent space + vec3 up = abs(normal.z) < 0.999 ? 
vec3(0.0, 0.0, 1.0) : vec3(1.0, 0.0, 0.0); + vec3 tangentX = normalize(cross(up, normal)); + vec3 tangentY = normalize(cross(normal, tangentX)); + + // Convert to world Space + return normalize(tangentX * H.x + tangentY * H.y + normal * H.z); +} + +--- -------------------------------------------------------------------------- +-- glsl DomeLight.Irradiance + +const float deltaPhi = (2.0f * float(PI)) / 180.0f; +const float deltaTheta = (0.5f * float(PI)) / 64.0f; + +vec3 SampleEnvMap(vec3 sampleVec) +{ + // sample from a mipmap level of the environment map determined by the + // size of the environment map and the number of samples we are taking + ivec2 inDims = HgiGetSize_inTexture(); + float mipLevel = ceil(log2(inDims.x * deltaPhi/(2.0 * PI)) * 2.0f); + + return SampleEnvMapLod(sampleVec, mipLevel); +} + +vec3 ComputeIrradiance(vec3 inPos) +{ + vec3 N = normalize(inPos); + vec3 up = vec3(0.0, 1.0, 0.0); + vec3 right = normalize(cross(up, N)); + up = cross(N, right); + + const float TWO_PI = PI * 2.0; + const float HALF_PI = PI * 0.5; + + vec3 color = vec3(0.0); + uint sampleCount = 0u; + for (float phi = 0.0; phi < TWO_PI; phi += deltaPhi) { + for (float theta = 0.0; theta < HALF_PI; theta += deltaTheta) { + vec3 tempVec = cos(phi) * right + sin(phi) * up; + vec3 sampleVector = cos(theta) * N + sin(theta) * tempVec; + color += SampleEnvMap(sampleVector).rgb * cos(theta) * sin(theta); + sampleCount++; + } + } + return PI * color / float(sampleCount); +} + +void main(void) +{ + ivec2 outCoords = ivec2(hd_GlobalInvocationID.xy); + + vec2 texCoords = GetTexCoords(outCoords); + vec3 pos3D = GetWorldPos(texCoords); + vec4 outColor = vec4(ComputeIrradiance(pos3D), 1.0); + + HgiSet_outTexture(outCoords, outColor); +} + +--- -------------------------------------------------------------------------- +-- glsl DomeLight.Prefilter + +// Normal Distribution function +float Distribution_GGX(float dotNH, float roughness) +{ + float alpha = roughness * roughness; + float alpha2 = alpha * alpha; + float denom = dotNH * dotNH * (alpha2 - 1.0) + 1.0; + return (alpha2)/(PI * denom*denom); +} + +vec3 PrefilterEnvMap(vec3 R, float roughness) +{ + vec3 N = R; + vec3 V = R; + vec3 color = vec3(0.0); + float totalWeight = 0.0; + float envMapDim = float(HgiGetSize_inTexture().x); + const uint numSamples = 1024u; + for (uint i = 0u; i < numSamples; i++) { + vec2 Xi = Hammersley2d(i, numSamples); + vec3 H = ImportanceSample_GGX(Xi, roughness, N); + vec3 L = 2.0 * dot(V, H) * H - V; + float dotNL = clamp(dot(N, L), 0.0, 1.0); + if (dotNL > 0.0) { + + float dotNH = clamp(dot(N, H), 0.0, 1.0); + float dotVH = clamp(dot(V, H), 0.0, 1.0); + + // Probability Distribution Function + float pdf = Distribution_GGX(dotNH, roughness) * dotNH + / (4.0 * dotVH) + 0.0001; + // Solid angle of current sample + float omegaS = 1.0 / (float(numSamples) * pdf); + // Solid angle of 1 pixel across all cube faces + float omegaP = 4.0 * PI / (6.0 * envMapDim * envMapDim); + // Biased (+1.0) mip level for better result + float mipLevel = roughness == 0.0 + ? 
0.0 : max(0.5 * log2(omegaS / omegaP) + 1.0, 0.0f); + color += SampleEnvMapLod(L, mipLevel).rgb * dotNL; + totalWeight += dotNL; + + } + } + return (color / totalWeight); +} + +void main(void) +{ + ivec2 outCoords = ivec2(hd_GlobalInvocationID.xy); + + vec2 texCoords = GetTexCoords(outCoords); + vec3 pos3D = GetWorldPos(texCoords); + vec3 R = normalize(pos3D); + vec4 outColor = vec4(PrefilterEnvMap(R, inRoughness), 1.0); + + HgiSet_outTexture(outCoords, outColor); +} + +--- -------------------------------------------------------------------------- +-- glsl DomeLight.BRDF + +float Geometry_SchlicksmithGGX(float dotNL, float dotNV, float roughness) +{ + float k = (roughness * roughness) / 2.0; + float GL = dotNL / (dotNL * (1.0 - k) + k); + float GV = dotNV / (dotNV * (1.0 - k) + k); + return GL * GV; +} + +vec2 ComputeBRDF(float NoV, float roughness) +{ + // make sure NoV doesn't go exactly to 0 to avoid NaN + NoV = max(NoV, 0.001); + + // Normal always points along z-axis for the 2D lookup + const vec3 N = vec3(0.0, 0.0, 1.0); + vec3 V = vec3(sqrt(1.0 - NoV*NoV), 0.0, NoV); + + vec2 LUT = vec2(0.0); + const uint NUM_SAMPLES = 1024u; + for (uint i = 0u; i < NUM_SAMPLES; i++) { + vec2 Xi = Hammersley2d(i, NUM_SAMPLES); + vec3 H = ImportanceSample_GGX(Xi, roughness, N); + vec3 L = 2.0 * dot(V, H) * H - V; + + float dotNL = max(dot(N, L), 0.0); + float dotNV = max(dot(N, V), 0.0); + float dotVH = max(dot(V, H), 0.0); + float dotNH = max(dot(H, N), 0.0); + + if (dotNL > 0.0) { + float G = Geometry_SchlicksmithGGX(dotNL, dotNV, roughness); + float G_Vis = (G * dotVH) / (dotNH * dotNV); + float Fc = pow(1.0 - dotVH, 5.0); + LUT += vec2((1.0 - Fc) * G_Vis, Fc * G_Vis); + } + } + return LUT / float(NUM_SAMPLES); +} + +void main(void) +{ + ivec2 outCoords = ivec2(hd_GlobalInvocationID.xy); + + vec2 texCoords = GetTexCoords(outCoords); + // texCoords.x represents N dot E and texCoords.y represents roughness + vec4 outColor = vec4(ComputeBRDF(texCoords.x, texCoords.y), 0.0, 1.0); + + HgiSet_outTexture(outCoords, outColor); +} diff --git a/blender/lib/usd/hdSt/resources/shaders/edgeId.glslfx b/blender/lib/usd/hdSt/resources/shaders/edgeId.glslfx new file mode 100644 index 0000000..58eb91c --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/edgeId.glslfx @@ -0,0 +1,208 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdSt/shaders/edgeId.glslfx + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.Common + +// Determines whether a fragment is on or close to an unhidden triangle edge. +const float edgePickRadius = 2; // in viewport pixels +const float edgePickParametricRadius = 0.02; // in parametric space + +// For smooth looking edges, use an exponential falloff for the opacity. +// Note: We use a slower falloff fn than in meshWire.glslfx to make the +// selected edges stand out. +float SelectedEdgeFalloff(float d) { + return exp2(-1 * d * d); +} + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.TriangleSurface + +FORWARD_DECL(vec3 GetEdgeDistanceTriangle()); + +vec3 ComputeBaseFaceEdgeDistanceTriangle() +{ + return GetEdgeDistanceTriangle(); +} + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.TriangleLines + +vec3 ComputeBaseFaceEdgeDistanceTriangle() +{ + // Use parametric distance since we can't use surface derivatives for lines + vec3 param = GetEdgeParamTriangle(); + bvec3 nearEdge = lessThan(param, vec3(edgePickParametricRadius)); + return vec3(edgePickRadius) * vec3(not(nearEdge)); +} + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.TriangleParam + +float GetSelectedEdgeOpacity() +{ + float closestEdgeDistance = 0.0; + + // The sequence of comparisons should match GetPrimitiveEdgeId() + vec3 dist = ComputeBaseFaceEdgeDistanceTriangle(); + if (dist.x < edgePickRadius) { + closestEdgeDistance = dist.x; + } else if (dist.y < edgePickRadius) { + closestEdgeDistance = dist.y; + } else if (dist.z < edgePickRadius) { + closestEdgeDistance = dist.z; + } else { + return 0; + } + + return SelectedEdgeFalloff(closestEdgeDistance); +} + +// called from hdx/renderPass.glslfx and selection.glslfx +int GetPrimitiveEdgeId() +{ + // 2 (0,1,0) + // ^ + // e2 / \ e1 + // / \' + // (0,0,1) 0 ----- 1 (1,0,0) + // e0 + + int edgeId = -1; + + vec3 dist = ComputeBaseFaceEdgeDistanceTriangle(); + if (dist.x < edgePickRadius) { + edgeId = 0; + } else if (dist.y < edgePickRadius) { + edgeId = 1; + } else if (dist.z < edgePickRadius) { + edgeId = 2; + } else { + return -1; // Not on a mesh edge + } + + const int edgeFlag = GetEdgeFlag(); + const int meshEdgeIndex = HdGetScalar_edgeIndices(); + + // Translate face-edge to authored mesh-edge + if (edgeFlag == 0) { + edgeId = meshEdgeIndex + edgeId; // regular triangle face. 
+ } else if ((bool(edgeFlag & 1) && edgeId == 2) || + (bool(edgeFlag & 2) && edgeId == 0)) { + edgeId = -1; // Not an authored mesh edge + } else { + edgeId = meshEdgeIndex + edgeId - 1; + } + return edgeId; +} + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.QuadSurface + +vec4 ComputeBaseFaceEdgeDistanceQuad() +{ + return GetEdgeDistanceQuad(); +} + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.QuadLines + +vec4 ComputeBaseFaceEdgeDistanceQuad() +{ + // Use parametric distance since we can't use surface derivatives for lines + vec4 param = GetEdgeParamQuad(); + bvec4 nearEdge = lessThan(param, vec4(edgePickParametricRadius)); + return vec4(edgePickRadius) * vec4(not(nearEdge)); +} + +--- -------------------------------------------------------------------------- +-- glsl EdgeId.Fragment.QuadParam + +float GetSelectedEdgeOpacity() +{ + float closestEdgeDistance = 0.0; + + // The sequence of comparisons should match GetPrimitiveEdgeId() + vec4 dist = ComputeBaseFaceEdgeDistanceQuad(); + if (dist.x < edgePickRadius) { + closestEdgeDistance = dist.x; + } else if (dist.y < edgePickRadius) { + closestEdgeDistance = dist.y; + } else if (dist.z < edgePickRadius) { + closestEdgeDistance = dist.z; + } else if (dist.w < edgePickRadius) { + closestEdgeDistance = dist.w; + } else { + return 0; + } + + return SelectedEdgeFalloff(closestEdgeDistance); +} + +// called from hdx/renderPass.glslfx and selection.glslfx +int GetPrimitiveEdgeId() +{ + // e2 + // (0,1) 3 ------ 2 (1,1) + // | | + // e3 | | e1 + // | | + // (0,0) 0 ------ 1 (1,0) + // e0 + + int edgeId = -1; + + vec4 dist = ComputeBaseFaceEdgeDistanceQuad(); + if (dist.x < edgePickRadius) { + edgeId = 0; + } else if (dist.y < edgePickRadius) { + edgeId = 1; + } else if (dist.z < edgePickRadius) { + edgeId = 2; + } else if (dist.w < edgePickRadius) { + edgeId = 3; + } else { + return -1; // Not on a mesh edge + } + + const int edgeFlag = GetEdgeFlag(); + const ivec2 meshEdgeIndices = HdGet_edgeIndices(); + + // Translate face-edge to authored mesh-edge + if (edgeFlag == 0) { + edgeId = meshEdgeIndices[0] + edgeId; // regular quad face + } else if (edgeId == 0) { + edgeId = meshEdgeIndices[0]; + } else if (edgeId == 3) { + edgeId = meshEdgeIndices[1]; + } else { + edgeId = -1; // Not an authored mesh edge + } + return edgeId; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/fallbackLighting.glslfx b/blender/lib/usd/hdSt/resources/shaders/fallbackLighting.glslfx new file mode 100644 index 0000000..b34a3ef --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/fallbackLighting.glslfx @@ -0,0 +1,76 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. 
+//
+// You may obtain a copy of the Apache License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the Apache License with the above modification is
+// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the Apache License for the specific
+// language governing permissions and limitations under the Apache License.
+//
+
+--- This is what an import might look like.
+--- #import $TOOLS/hdSt/shaders/fallbackLighting.glslfx
+
+--- --------------------------------------------------------------------------
+-- glsl Fallback.LightIntegrator
+#ifndef HD_HAS_integrateLights
+#define HD_HAS_integrateLights
+#endif
+
+struct LightingContribution {
+    vec3 diffuse;
+};
+
+struct LightingInterfaceProperties {
+    float unused;
+};
+
+LightingContribution
+integrateLightsDefault(vec4 Peye, vec3 Neye, LightingInterfaceProperties props)
+{
+    vec3 n = normalize(Neye);
+
+    LightingContribution result;
+    result.diffuse = vec3(dot(n, vec3(0,0,1)));
+
+    return result;
+}
+
+LightingContribution
+integrateLightsConstant(vec4 Peye, vec3 Neye, LightingInterfaceProperties props)
+{
+    LightingContribution result;
+    // perfectly diffuse white hemisphere contribution
+    result.diffuse = vec3(1);
+
+    return result;
+}
+
+-- glsl Fallback.Lighting
+
+FORWARD_DECL(
+    LightingContribution integrateLights(vec4 Peye, vec3 Neye,
+        LightingInterfaceProperties props));
+
+vec3 FallbackLighting(in vec3 Peye, in vec3 Neye, in vec3 color)
+{
+    LightingInterfaceProperties props;
+    LightingContribution light = integrateLights(vec4(Peye, 1), Neye, props);
+    return color * light.diffuse;
+}
diff --git a/blender/lib/usd/hdSt/resources/shaders/fallbackLightingShader.glslfx b/blender/lib/usd/hdSt/resources/shaders/fallbackLightingShader.glslfx
new file mode 100644
index 0000000..b0f95e8
--- /dev/null
+++ b/blender/lib/usd/hdSt/resources/shaders/fallbackLightingShader.glslfx
@@ -0,0 +1,44 @@
+-- glslfx version 0.1
+
+//
+// Copyright 2016 Pixar
+//
+// Licensed under the Apache License, Version 2.0 (the "Apache License")
+// with the following modification; you may not use this file except in
+// compliance with the Apache License and the following modification to it:
+// Section 6. Trademarks. is deleted and replaced with:
+//
+// 6. Trademarks. This License does not grant permission to use the trade
+// names, trademarks, service marks, or product names of the Licensor
+// and its affiliates, except as required to comply with Section 4(c) of
+// the License and to reproduce the content of the NOTICE file.
+//
+// You may obtain a copy of the Apache License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the Apache License with the above modification is
+// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the Apache License for the specific
+// language governing permissions and limitations under the Apache License.
+//
+
+--- This is what an import might look like.
+--- #import $TOOLS/hdSt/shaders/fallbackLightingShader.glslfx + +#import $TOOLS/hdSt/shaders/fallbackLighting.glslfx + +-- configuration +{ + "techniques": { + "default": { + "fragmentShader" : { + "source": [ + "Fallback.LightIntegrator", + "Fallback.Lighting" + ] + } + } + } +} diff --git a/blender/lib/usd/hdSt/resources/shaders/fallbackMaterialNetwork.glslfx b/blender/lib/usd/hdSt/resources/shaders/fallbackMaterialNetwork.glslfx new file mode 100644 index 0000000..2dea4e7 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/fallbackMaterialNetwork.glslfx @@ -0,0 +1,64 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/fallbackSurface.glslfx + +--- +--- The fallback shader is used as a replacement shader if the +--- original material shader failed to compile. It needs to +--- define both the surfaceShader() and displacementShader() terminals. +--- +-- configuration +{ + "techniques": { + "default": { + "displacementShader": { + "source": [ "Fallback.Displacement" ] + }, + "surfaceShader": { + "source": [ "Fallback.Surface" ] + } + } + } +} + +--- -------------------------------------------------------------------------- +-- glsl Fallback.Surface + +vec4 surfaceShader(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + // lighting + color.rgb = FallbackLighting(Peye.xyz, Neye, color.rgb); + return color; +} +--- -------------------------------------------------------------------------- +-- glsl Fallback.Displacement + +vec4 displacementShader(int index, vec4 Peye, vec3 Neye, vec4 patchCoord) +{ + return Peye; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/fallbackVolume.glslfx b/blender/lib/usd/hdSt/resources/shaders/fallbackVolume.glslfx new file mode 100644 index 0000000..4aa63f4 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/fallbackVolume.glslfx @@ -0,0 +1,114 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. 
This License does not grant permission to use the trade
+// names, trademarks, service marks, or product names of the Licensor
+// and its affiliates, except as required to comply with Section 4(c) of
+// the License and to reproduce the content of the NOTICE file.
+//
+// You may obtain a copy of the Apache License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the Apache License with the above modification is
+// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the Apache License for the specific
+// language governing permissions and limitations under the Apache License.
+//
+
+--- This is what an import might look like.
+--- #import $TOOLS/hdSt/shaders/fallbackVolume.glslfx
+
+-- configuration
+{
+    "techniques": {
+        "default": {
+            "volumeShader": {
+                "source": [ "FallbackVolume.VolumeShader" ]
+            }
+        }
+    }
+}
+
+--- --------------------------------------------------------------------------
+-- glsl FallbackVolume.VolumeShader
+
+// Functions that the volume shader of a volume material would provide.
+//
+// The functions give extinction, scattering, emission at a point as well as
+// a phase function such as Henyey-Greenstein (see, e.g., [1]).
+//
+// Only single scattering is taken into account and the result of the
+// scattering function (together with the phase function) is used to compute
+// the in-scattering component of the volume rendering equation for a light
+// source (point lights only).
+// The extinction function is supposed to return the sum of the
+// absorption and out-scattering cross section.
+// Note that the interpretation of emission here follows [1] (rather than [2]),
+// so the emission is added by the ray-marcher without being multiplied by
+// extinction.
+// Note that one cannot use the fallback volume shader for assets containing
+// "glow" rather than "emission" where the convention is
+// emission = glow * extinction.
+//
+// [1] Matt Pharr, Wenzel Jakob, Greg Humphreys, "Physically Based Rendering",
+// Third Edition.
+// [2] Julian Fong, Magnus Wrenninge, Christopher Kulla, Ralf Habel,
+// "Production Volume Rendering", SIGGRAPH 2017 Course.
+
+// The functions given here use a density and emission field with a fixed
+// albedo.
+//
+//
+
+// Extinction function, returns sum of absorption and out-scattering cross
+// section.
+//
+float
+extinctionFunction(vec3 p)
+{
+    return HdGet_density(p);
+}
+
+// Scattering function, returns in-scattering cross-section (will be combined
+// with phase function).
+//
+// Here: the density field scaled by a constant albedo.
+float
+scatteringFunction(vec3 p)
+{
+    const float albedo = 0.18;
+
+    return extinctionFunction(p) * albedo;
+}
+
+// Emission function, returns emission cross-section.
+//
+// Here: the emission field of the volume.
+vec3
+emissionFunction(vec3 p)
+{
+    return HdGet_emission(p);
+}
+
+// Phase function in volume rendering equation.
+//
+// Here: isotropic.
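+//
+// As a quick check (standard result, not from the original comment): an
+// isotropic phase function must integrate to one over all directions,
+//
+//     integral over S^2 of (1 / (4 pi)) dOmega = (4 pi) / (4 pi) = 1,
+//
+// which is why phaseFunction below returns 1 / (4 * pi).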
+float +phaseFunction(vec3 direction1, vec3 direction2) +{ + const float pi = 3.14159265358979; + const float sphereArea = 4.0 * pi; + const float inverseSphereArea = 1.0 / sphereArea; + + return inverseSphereArea; +} + diff --git a/blender/lib/usd/hdSt/resources/shaders/frustumCull.glslfx b/blender/lib/usd/hdSt/resources/shaders/frustumCull.glslfx new file mode 100644 index 0000000..c53301a --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/frustumCull.glslfx @@ -0,0 +1,425 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/compute.glslfx + +#import $TOOLS/hdSt/shaders/instancing.glslfx + +--- -------------------------------------------------------------------------- +-- layout ViewFrustumCull.Counting + +[ + ["buffer readWrite", "ResultData", "drawIndirectResult", + ["atomic_int", "numVisibleInstances"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.Counting + +void +FrustumCullCountVisibleInstances(int resultInstanceCount) +{ + ATOMIC_ADD(numVisibleInstances[0], resultInstanceCount); +} + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.NoCounting + +void +FrustumCullCountVisibleInstances(int resultInstanceCount) +{ + // do nothing +} + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.NoTinyCull + +bool +FrustumCullIsTinyPrim(vec4 bboxMin, vec4 bboxMax, vec2 drawRangeNDC) +{ + // do nothing + return false; +} + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.TinyCull + +bool +FrustumCullIsTinyPrim(vec4 bboxMin, vec4 bboxMax, vec2 drawRangeNDC) +{ + // Check the length of the min/max diagonal, could do something better here. + vec2 ndcMin = (bboxMin.xy/max(.000001,bboxMin.w)); + vec2 ndcMax = (bboxMax.xy/max(.000001,bboxMax.w)); + float diag = distance(ndcMin, ndcMax); + + // Cull prims outside the min(x)/max(y) range. 
+ // When max is negative, do not cull based on max size + + #define isLargeEnough (diag > drawRangeNDC.x) + #define isSmallEnough (drawRangeNDC.y < 0 || diag < drawRangeNDC.y) + + return !isLargeEnough || !isSmallEnough; + + #undef isLargeEnough + #undef isSmallEnough +} + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.IsVisible + +bool +FrustumCullIsVisible(MAT4 toClip, vec4 localMin, vec4 localMax, vec2 drawRangeNDC) +{ + // Disable culling when: + // (a) BBox is empty. An empty bbox defaults to [FLT_MAX, -FLT_MAX]), so + // min > max. + // (b) Bounds are infinite. + if (any(greaterThan(localMin, localMax)) || + any(isinf(localMin)) || any(isinf(localMax))) { + return true; + } + + // Transform the corners of the bounding box to clipping space. + vec4 p[8]; + p[0] = vec4(toClip * vec4(localMin.x, localMin.y, localMin.z, 1)); + p[2] = vec4(toClip * vec4(localMin.x, localMax.y, localMin.z, 1)); + p[5] = vec4(toClip * vec4(localMax.x, localMin.y, localMax.z, 1)); + p[7] = vec4(toClip * vec4(localMax.x, localMax.y, localMax.z, 1)); + + // This prim is visible if it wasn't culled and at least one tiny prim test + // failed. Test two axes here because size is measured in screen space. + // We front-load this test because it saves quite a bit of compute: in one + // test the framerate went from 7.7 to 9.0 FPS. + if (FrustumCullIsTinyPrim(p[0], p[7], drawRangeNDC) && + FrustumCullIsTinyPrim(p[2], p[5], drawRangeNDC)) + { + return false; + } + + // Finish computing points and perform frustum culling. + p[1] = vec4(toClip * vec4(localMin.x, localMin.y, localMax.z, 1)); + p[3] = vec4(toClip * vec4(localMin.x, localMax.y, localMax.z, 1)); + p[4] = vec4(toClip * vec4(localMax.x, localMin.y, localMin.z, 1)); + p[6] = vec4(toClip * vec4(localMax.x, localMax.y, localMin.z, 1)); + + // Test the corners of the bounding box against the clipping volume. + // clipFlags is effectively a 6-bit field, holding one bit of information + // per frustum plane. Each component of the vector holds 2 bits. + // If the bounding box overlaps the clip volume, then clipFlags will be + // (0b11, 0b11, 0b11). + ivec3 clipFlags = ivec3(0); + for (int i=0; i<8; ++i) { + vec4 clipPos = p[i]; + bvec3 clip0 = lessThan(clipPos.xyz, vec3(clipPos.w)); + bvec3 clip1 = greaterThan(clipPos.xyz, -vec3(clipPos.w)); + clipFlags |= ivec3(clip0) /* bit 0 */ + 2*ivec3(clip1) /*bit 1*/; + } + + return all(equal(clipFlags, ivec3(3))); +} + +--- -------------------------------------------------------------------------- +-- layout ViewFrustumCull.Vertex + +[ + ["in", "int", "instanceCountInput"], + ["in", "int", "drawCommandIndex"], + ["uniform block", "Uniforms", "ulocCullParams", + ["mat4", "cullMatrix"], + ["vec2", "drawRangeNDC"], + ["uint", "drawCommandNumUints"] + ], + ["buffer readWrite", "DispatchBuffer", "dispatchBuffer", + ["uint", "drawCommands", "[]"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.Vertex + +MAT4 GetCullMatrix() +{ + return MAT4(cullMatrix); +} + +void main() +{ + // instanceCountOffset is a relative offset in drawcommand struct. + // it's a second entry in both DrawArraysCommand and DrawElementsCommand. 
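+    // For reference, the standard GL indirect draw structs also keep the
+    // instance count in the second uint:
+    //   DrawArraysIndirectCommand   { uint count; uint instanceCount; uint first; uint baseInstance; };
+    //   DrawElementsIndirectCommand { uint count; uint instanceCount; uint firstIndex; uint baseVertex; uint baseInstance; };
+    // The stride of one command may be larger than either struct, so it is
+    // passed in separately as drawCommandNumUints.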
+ const uint instanceCountOffset = 1; + + MAT4 transform = HdGet_transform(); + MAT4 toClip = GetCullMatrix() * transform; + + vec4 localMin = HdGet_bboxLocalMin(); + vec4 localMax = HdGet_bboxLocalMax(); + + bool isVisible = FrustumCullIsVisible( + toClip, localMin, localMax, drawRangeNDC); + + // Compute the index to the 'instanceCount' struct member in drawCommands. + uint index = uint(drawCommandIndex) * + drawCommandNumUints + instanceCountOffset; + + // Set the resulting instance count to 0 if the primitive is culled + // otherwise pass through the original incoming instance count. + uint resultInstanceCount = instanceCountInput * uint(isVisible); + drawCommands[index] = resultInstanceCount; + + FrustumCullCountVisibleInstances(int(resultInstanceCount)); +} + +--- -------------------------------------------------------------------------- +-- layout ViewFrustumCull.VertexInstancing + +[ + ["in", "int", "drawCommandIndex"], + ["uniform block", "Uniforms", "ulocCullParams", + ["mat4", "cullMatrix"], + ["vec2", "drawRangeNDC"], + ["uint", "drawCommandNumUints"], + ["int", "resetPass"] + ], + ["buffer readWrite", "DispatchBuffer", "dispatchBuffer", + ["atomic_uint", "drawCommands", "[]"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.VertexInstancing + +/* + per-instance culling + + We use instance index indirection buffer to switch prototypes efficiently. + Per-instance culling exploits this indirection to trim culled instances + from draw call. + + Example: Prototype mesh M is instanced for 5 instances. + + Instancer has instance primvars (translate, rotate, ...) + and index indirection for M. + + InstancePrimvar (T:translate) + +-------+-----------------------------+-----------+ + : |T0 T1 T2 T3 T4 T5 T6 T7 T8 T9| : + +-------+-----------------------------+-----------+ + ^ + InstanceIndices(for M) | + +-----------+---------------+-----------+ + : | 0 2 5 8 9 | : + +-----------+---------------+-----------+ + ^ + | + M: gl_InstanceID (0-5) + + We can draw all instances of M, by just drawing with numInstance = 6. + + For per-instance culling, we test each bbox against the frustum. + Then, we store only passed instance indices into culledInstanceIndices buffer, + as well as counting them. + + InstanceIndices(for M) + +-----------+---------------+-----------+ + : | 0 2 5 8 9 | : instanceCount = 5 + +-----------+---------------+-----------+ + V + (say only 2 and 8 are visible in frustum) + V + +-----------+---------------+-----------+ + : | 2 8 x x x | : instanceCount = 2 + +-----------+---------------+-----------+ + x:undefined value +*/ + +MAT4 GetCullMatrix() +{ + return MAT4(cullMatrix); +} + +void main() +{ + // instanceCountOffset is a relative offset in drawcommand struct. + // it's a second entry in both DrawArraysCommand and DrawElementsCommand. + const uint instanceCountOffset = 1; + + const uint instanceCountBufferOffset = + drawCommandIndex * drawCommandNumUints + instanceCountOffset; + + // reset pass + if (resetPass == 1) { + // note: we expect all instance invocations of this draw command + // are clearing same field to zero, and and so might not guard this + // access as atomic. + ATOMIC_STORE(drawCommands[instanceCountBufferOffset], 0); + return; + } + + // culling pass + // precondition: drawCommand.instanceCount has to be reset in the separate + // invocation unit. 
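+    // Because the reset pass left the counter at zero, the pre-increment
+    // value returned by ATOMIC_ADD below doubles as a compact write index
+    // into culledInstanceIndices: visible instances land in slots 0..N-1
+    // with no holes, matching the diagram above.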
+ + vec4 localMin = HdGet_bboxLocalMin(); + vec4 localMax = HdGet_bboxLocalMax(); + + MAT4 toClip = GetCullMatrix() * ApplyInstanceTransform(HdGet_transform()); + + bool isVisible = FrustumCullIsVisible( + toClip, localMin, localMax, drawRangeNDC); + + if (isVisible) { + // increment the instance count and store instanceIndex to culledInstanceIndices. + + uint id = ATOMIC_ADD(drawCommands[instanceCountBufferOffset], 1); + + SetCulledInstanceIndex(id); + FrustumCullCountVisibleInstances(1); + } +} + + +--- -------------------------------------------------------------------------- +-- layout ViewFrustumCull.Compute + +[ + ["uniform block", "Uniforms", "ulocCullParams", + ["mat4", "cullMatrix"], + ["vec2", "drawRangeNDC"], + ["uint", "drawCommandNumUints"] + ], + ["buffer readOnly", "DrawCullInput", "drawCullInput", + ["uint", "drawCullInput", "[]"] + ], + ["buffer readWrite", "DispatchBuffer", "dispatchBuffer", + ["uint", "drawCommands", "[]"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.Compute + +MAT4 GetCullMatrix() +{ + return MAT4(cullMatrix); +} + +void compute(int drawCommandIndex) +{ + SetDrawIndex(drawCommandIndex, 0); + + // instanceCountOffset is a relative offset in drawcommand struct. + // it's a second entry in both DrawArraysCommand and DrawElementsCommand. + const uint instanceCountOffset = 1; + + MAT4 transform = HdGet_transform(); + MAT4 toClip = GetCullMatrix() * transform; + + vec4 localMin = HdGet_bboxLocalMin(); + vec4 localMax = HdGet_bboxLocalMax(); + + bool isVisible = FrustumCullIsVisible( + toClip, localMin, localMax, drawRangeNDC); + + // Compute the index to the 'instanceCount' struct member in drawCommands. + uint instanceIndex = uint(drawCommandIndex) * + drawCommandNumUints + instanceCountOffset; + + // Set the resulting instance count to 0 if the primitive is culled + // otherwise pass through the original incoming instance count. + uint resultInstanceCount = drawCullInput[instanceIndex] * uint(isVisible); + drawCommands[instanceIndex] = resultInstanceCount; + + FrustumCullCountVisibleInstances(int(resultInstanceCount)); +} + +--- -------------------------------------------------------------------------- +-- layout ViewFrustumCull.ComputeInstancing + +[ + ["uniform block", "Uniforms", "ulocCullParams", + ["mat4", "cullMatrix"], + ["vec2", "drawRangeNDC"], + ["uint", "drawCommandNumUints"] + ], + ["buffer readOnly", "DrawCullInput", "drawCullInput", + ["uint", "drawCullInput", "[]"] + ], + ["buffer readWrite", "DispatchBuffer", "dispatchBuffer", + ["uint", "drawCommands", "[]"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl ViewFrustumCull.ComputeInstancing + +MAT4 GetCullMatrix() +{ + return MAT4(cullMatrix); +} + +void compute(int drawCommandIndex) +{ + SetDrawIndex(drawCommandIndex, 0); + + // instanceCountOffset is a relative offset in drawcommand struct. + // it's a second entry in both DrawArraysCommand and DrawElementsCommand. + const uint instanceCountOffset = 1; + + const uint instanceCountBufferOffset = + drawCommandIndex * drawCommandNumUints + instanceCountOffset; + + const uint instanceCount = drawCullInput[instanceCountBufferOffset]; + + vec4 localMin = HdGet_bboxLocalMin(); + vec4 localMax = HdGet_bboxLocalMax(); + MAT4 transform = HdGet_transform(); + + // Reset the instance count. 
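+    // Unlike the per-vertex instancing path above, this compute path walks
+    // all instances of one draw command within a single invocation, so the
+    // plain (non-atomic) store and increment below are sufficient.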
+ drawCommands[instanceCountBufferOffset] = 0; + + for (int i = 0; i < instanceCount; ++i) + { + SetDrawIndex(drawCommandIndex, i); + + MAT4 toClip = GetCullMatrix() * ApplyInstanceTransform(transform); + + bool isVisible = FrustumCullIsVisible( + toClip, localMin, localMax, drawRangeNDC); + + if (isVisible) { + // Increment the instance count and store instanceIndex to + // culledInstanceIndices. + uint id = drawCommands[instanceCountBufferOffset]; + + drawCommands[instanceCountBufferOffset] += 1; + + SetCulledInstanceIndex(id); + FrustumCullCountVisibleInstances(1); + } + } +} diff --git a/blender/lib/usd/hdSt/resources/shaders/imageShader.glslfx b/blender/lib/usd/hdSt/resources/shaders/imageShader.glslfx new file mode 100644 index 0000000..cbef261 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/imageShader.glslfx @@ -0,0 +1,120 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/imageShader.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "ImageShader.Vertex" ] + }, + "fragmentShader" : { + "source": [ "ImageShader.Fragment" ] + } + } + } +} + +--- -------------------------------------------------------------------------- +-- layout ImageShader.Vertex + +[ + ["out block", "VertexData", "outData", + ["vec2", "uv"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl ImageShader.Vertex + +void main(void) +{ + // Position the vertices to create a large-than-screen triangle. + // Adjust the UVs of the triangle to have 0-1 fit the screen exactly. + // 'st' is the geometric UV where the [bottom,left] returns [0, 0]. + // Unlike gl_fragCoord where the [bottom,left] defaults to [0.5, 0.5]. 
+ // + // hd_VertexID=0 -> (-1,-1) + // hd_VertexID=1 -> ( 3,-1) + // hd_VertexID=2 -> (-1, 3) + // + // glDrawArrays( GL_TRIANGLES, 0, 3 ); + // + // ID=2 + // x,y=-1,3 + // u,v=0,2 + // |\ + // | \ + // | \ + // | \ + // |--------\ + // | | \ + // | screen | \ + // | | \ + // ---------------- + // ID=0 ID=1 + // x,y=-1,-1 x,y=3,-1 + // u,v=0,0 u,v=2,0 + // + // + float x = -1.0 + float(((hd_VertexID%3) & 1) << 2); + float y = -1.0 + float(((hd_VertexID%3) & 2) << 1); + outData.uv.x = (x+1.0) * 0.5; + outData.uv.y = (y+1.0) * 0.5; + + gl_Position = vec4(x, y, 0, 1); +} + +--- -------------------------------------------------------------------------- +-- layout ImageShader.Fragment + +[ + ["in block", "VertexData", "inData", + ["vec2", "uv"] + ], + ["out", "vec4", "colorOut"] +] + +--- -------------------------------------------------------------------------- +-- glsl ImageShader.Fragment + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) { + return integrateLightsDefault(Peye, Neye, props); +} +#endif +#endif + +void main(void) +{ + colorOut = imageShader(inData.uv); +} diff --git a/blender/lib/usd/hdSt/resources/shaders/instancing.glslfx b/blender/lib/usd/hdSt/resources/shaders/instancing.glslfx new file mode 100644 index 0000000..a4dfc67 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/instancing.glslfx @@ -0,0 +1,189 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/instancing.glslfx + +--- -------------------------------------------------------------------------- +-- glsl Instancing.Transform + +// quaternion to matrix. 
xyz = imaginary, w = real +MAT4 GetRotationMatrix(vec4 q) +{ + MAT4 r; + r[0].xyzw = vec4(1 - 2 * (q.y * q.y + q.z * q.z), + 2 * (q.x * q.y + q.z * q.w), + 2 * (q.x * q.z - q.y * q.w), + 0); + r[1].xyzw = vec4( 2 * (q.x * q.y - q.z * q.w), + 1 - 2 * (q.x * q.x + q.z * q.z), + 2 * (q.y * q.z + q.x * q.w), + 0); + r[2].xyzw = vec4( 2 * (q.x * q.z + q.y * q.w), + 2 * (q.y * q.z - q.x * q.w), + 1 - 2 * (q.x * q.x + q.y * q.y), + 0); + r[3] = vec4(0, 0, 0, 1); + return r; +} + +// --------------------------------------------------------------------------- + +MAT4 GetInstanceTransform(int level) +{ + MAT4 m = MAT4(1); +#ifdef HD_HAS_INSTANCE_instanceTransform + m = HdGetInstance_instanceTransform(level, MAT4(1)) * m; +#endif + + // instance transform elements are applied: + // scale then rotate then translate + // i.e. (T * R * S) * position + +#ifdef HD_HAS_INSTANCE_scale + vec3 s = HdGetInstance_scale(level, /*default=*/vec3(1)); + m = MAT4(s.x, 0, 0, 0, + 0, s.y, 0, 0, + 0, 0, s.z, 0, + 0, 0, 0, 1) * m; +#endif + +#ifdef HD_HAS_INSTANCE_rotate // GfQuat(ix, iy, iz, real) + vec4 q = HdGetInstance_rotate(level, /*default=*/vec4(0)); + m = GetRotationMatrix(q) * m; +#endif + +#ifdef HD_HAS_INSTANCE_translate + vec3 t = HdGetInstance_translate(level, /*default=*/vec3(0)); + m = MAT4( 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + t.x, t.y, t.z, 1) * m; +#endif + return m; +} + +MAT4 GetInstanceTransformInverse(int level) +{ + MAT4 m = MAT4(1); + +#ifdef HD_HAS_INSTANCE_instanceTransform + m = inverse(HdGetInstance_instanceTransform(level, MAT4(1))) * m; +#endif + +#ifdef HD_HAS_INSTANCE_translate + vec3 it = -HdGetInstance_translate(level, /*default=*/vec3(0)); // negate + m = MAT4( 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + it.x, it.y, it.z, 1) * m; +#endif + +#ifdef HD_HAS_INSTANCE_rotate + vec4 q = HdGetInstance_rotate(level, /*default=*/vec4(0)); + q.xyz = -q.xyz; // inverse rotataion axis + m = GetRotationMatrix(q) * m; +#endif + +#ifdef HD_HAS_INSTANCE_scale + vec3 is = 1.0/HdGetInstance_scale(level, /*default=*/vec3(1)); // inverse scale + m = MAT4(is.x, 0, 0, 0, + 0, is.y, 0, 0, + 0, 0, is.z, 0, + 0, 0, 0, 1) * m; +#endif + return m; +} + +// --------------------------------------------------------------------------- + +MAT4 GetInstanceTransform() +{ + MAT4 m = MAT4(1); +#ifdef HD_INSTANCER_NUM_LEVELS + for (int i = 0; i < HD_INSTANCER_NUM_LEVELS; ++i) { + m = GetInstanceTransform(i) * m; +#ifdef HD_HAS_instancerTransform + m = HdGet_instancerTransform(i) * m; +#endif + } +#endif + return m; +} + +MAT4 GetInstanceTransformInverse() +{ + MAT4 m = MAT4(1); +#ifdef HD_INSTANCER_NUM_LEVELS + for (int i = 0; i < HD_INSTANCER_NUM_LEVELS; ++i) { + m = m * GetInstanceTransformInverse(i); +#ifdef HD_HAS_instancerTransformInverse + m = m * HdGet_instancerTransformInverse(i); +#endif + } +#endif + return m; +} + +// --------------------------------------------------------------------------- + +MAT4 ApplyInstanceTransform(MAT4 m) +{ + return GetInstanceTransform() * m; +} + +MAT4 ApplyInstanceTransformInverse(MAT4 m) +{ + return m * GetInstanceTransformInverse(); +} + +bool IsFlipped() +{ +#if defined(HD_HAS_isFlipped) + bool flip = (HdGet_isFlipped() != 0); +#elif defined(HD_HAS_transform) + // The sign of the determinant indicates whether m flips handedness + bool flip = (determinant(HdGet_transform()) < 0.0); +#else + bool flip = false; +#endif + +#ifdef HD_HAS_INSTANCE_scale + for (int i = 0; i < HD_INSTANCER_NUM_LEVELS; ++i) { + vec3 scale = HdGetInstance_scale(i, /*default=*/vec3(1)); + flip = flip 
!= ((sign(scale.x) * sign(scale.y) * sign(scale.z)) < 0); + } +#endif + +#ifdef HD_HAS_INSTANCE_instanceTransform + for (int i = 0; i < HD_INSTANCER_NUM_LEVELS; ++i) { + MAT4 m = HdGetInstance_instanceTransform(i, MAT4(1)); + flip = flip != (determinant(m) < 0.0); + } +#endif + + return flip; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/mesh.glslfx b/blender/lib/usd/hdSt/resources/shaders/mesh.glslfx new file mode 100644 index 0000000..4a89fd2 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/mesh.glslfx @@ -0,0 +1,1581 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/mesh.glslfx + +#import $TOOLS/hdSt/shaders/instancing.glslfx +#import $TOOLS/hdSt/shaders/meshFaceCull.glslfx +#import $TOOLS/hdSt/shaders/meshNormal.glslfx +#import $TOOLS/hdSt/shaders/meshWire.glslfx +#import $TOOLS/hdSt/shaders/terminals.glslfx +#import $TOOLS/hdSt/shaders/edgeId.glslfx +#import $TOOLS/hdSt/shaders/pointId.glslfx +#import $TOOLS/hdSt/shaders/visibility.glslfx + +--- -------------------------------------------------------------------------- +-- layout Mesh.Vertex + +[ + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Vertex + +// Fwd declare methods defined in pointId.glslfx, that are used below. 
+FORWARD_DECL(int GetPointId()); +FORWARD_DECL(float GetPointRasterSize(int)); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + ProcessPrimvarsIn(); + + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + vec4 point = vec4(HdGet_points().xyz, 1); + outData.Peye = vec4(GetWorldToViewMatrix() * transform * point); + + outData.Neye = GetNormal(vec3(0), 0); // normalized + + int pointId = GetPointId(); + gl_PointSize = GetPointRasterSize(pointId); + ProcessPointId(pointId); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.PostTessVertex.Triangle + +[ + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec3", "ptvsBarycentricCoord"], + ["out", "uint", "ptvsPatchId"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessVertex.Triangle + +vec4 GetPatchCoord(int index) +{ + vec2 uv[3]; + uv[0] = vec2(0, 0); // (0, 0, 1); + uv[1] = vec2(1, 0); // (1, 0, 0); + uv[2] = vec2(0, 1); // (0, 1, 0); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoordTriangle(uv[index], patchParam); +} + +// Fwd declare methods defined in pointId.glslfx, that are used below. +FORWARD_DECL(int GetPointId()); +FORWARD_DECL(float GetPointRasterSize(int)); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + int pointId = GetPointId(); + gl_PointSize = GetPointRasterSize(pointId); + ProcessPointId(pointId); + + vec4 point0 = GetWorldToViewMatrix() * transform * vec4(points[0],1.0); + vec4 point1 = GetWorldToViewMatrix() * transform * vec4(points[1],1.0); + vec4 point2 = GetWorldToViewMatrix() * transform * vec4(points[2],1.0); + + // Get the indata Neye if provided. + bool isFlipped = IsFlipped(); + vec3 Neye0 = isFlipped ? -GetNormal(vec3(0),0) : GetNormal(vec3(0),0); + vec3 Neye1 = isFlipped ? -GetNormal(vec3(0),1) : GetNormal(vec3(0),1); + vec3 Neye2 = isFlipped ? 
-GetNormal(vec3(0),2) : GetNormal(vec3(0),2); + + Neye0 = GetTriGeometryNormal(Neye0, point0, point1, point2, isFlipped); + Neye1 = GetTriGeometryNormal(Neye1, point0, point1, point2, isFlipped); + Neye2 = GetTriGeometryNormal(Neye2, point0, point1, point2, isFlipped); + + point0 = DisplacementTerminal(0, point0, Neye0, GetPatchCoord(0)); + point1 = DisplacementTerminal(1, point1, Neye1, GetPatchCoord(1)); + point2 = DisplacementTerminal(2, point2, Neye2, GetPatchCoord(2)); + + vec2 coord = gl_TessCoord.xy; + vec4 basis = vec4(coord.x, coord.y, 1.0f-coord.x-coord.y, 0.0f); + + outData.Peye = InterpolatePrimvar(point0, point1, point2, point0, basis); + outData.Neye = InterpolatePrimvar(Neye0, Neye1, Neye2, Neye0, basis); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ptvsBarycentricCoord = gl_TessCoord; + ptvsPatchId = patch_id; + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(basis, 0, 1, 2, 0); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.PostTessVertex.Quad + +[ + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec2", "ptvsBarycentricCoord"], + ["out", "uint", "ptvsPatchId"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessVertex.Quad + +vec4 GetPatchCoord(int index) +{ + vec2 uv[4]; + uv[0] = vec2(0, 0); + uv[1] = vec2(1, 0); + uv[2] = vec2(1, 1); + uv[3] = vec2(0, 1); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoord(uv[index], patchParam); +} + +// Fwd declare methods defined in pointId.glslfx, that are used below. +FORWARD_DECL(int GetPointId()); +FORWARD_DECL(float GetPointRasterSize(int)); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + int pointId = GetPointId(); + gl_PointSize = GetPointRasterSize(pointId); + ProcessPointId(pointId); + + vec4 point0 = GetWorldToViewMatrix() * transform * vec4(points[0],1.0); + vec4 point1 = GetWorldToViewMatrix() * transform * vec4(points[1],1.0); + vec4 point2 = GetWorldToViewMatrix() * transform * vec4(points[2],1.0); + vec4 point3 = GetWorldToViewMatrix() * transform * vec4(points[3],1.0); + + // Get the indata Neye if provided. + bool isFlipped = IsFlipped(); + vec3 Neye0 = isFlipped ? -GetNormal(vec3(0),0) : GetNormal(vec3(0),0); + vec3 Neye1 = isFlipped ? -GetNormal(vec3(0),1) : GetNormal(vec3(0),1); + vec3 Neye2 = isFlipped ? -GetNormal(vec3(0),2) : GetNormal(vec3(0),2); + vec3 Neye3 = isFlipped ? 
-GetNormal(vec3(0),3) : GetNormal(vec3(0),3); + + Neye0 = GetQuadGeometryNormal( + Neye0, point0, point1, point2, point3, isFlipped); + Neye1 = GetQuadGeometryNormal( + Neye1, point0, point1, point2, point3, isFlipped); + Neye2 = GetQuadGeometryNormal( + Neye2, point0, point1, point2, point3, isFlipped); + Neye3 = GetQuadGeometryNormal( + Neye3, point0, point1, point2, point3, isFlipped); + + point0 = DisplacementTerminal(0, point0, Neye0, GetPatchCoord(0)); + point1 = DisplacementTerminal(1, point1, Neye1, GetPatchCoord(1)); + point2 = DisplacementTerminal(2, point2, Neye2, GetPatchCoord(2)); + point3 = DisplacementTerminal(3, point3, Neye3, GetPatchCoord(3)); + + vec3 coord = gl_TessCoord.xy; + vec4 basis = vec4((1.0-coord.x) * (1.0-coord.y), coord.x * (1.0-coord.y), + (1.0-coord.x) * coord.y, coord.x * coord.y); + + outData.Peye = InterpolatePrimvar(point0, point1, point2, point3, basis); + outData.Neye = InterpolatePrimvar(Neye0, Neye1, Neye2, Neye3, basis); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ptvsBarycentricCoord = gl_TessCoord; + ptvsPatchId = patch_id; + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(basis, 0, 1, 2, 3); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.PostTessVertex.TriQuad + +[ + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec2", "ptvsBarycentricCoord"], + ["out", "uint", "ptvsPatchId"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessVertex.TriQuad + +vec4 GetPatchCoord(int index) +{ + vec2 uv[4]; + uv[0] = vec2(0, 0); + uv[1] = vec2(1, 0); + uv[2] = vec2(1, 1); + uv[3] = vec2(0, 1); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoord(uv[index], patchParam); +} + +// Fwd declare methods defined in pointId.glslfx, that are used below. +FORWARD_DECL(int GetPointId()); +FORWARD_DECL(float GetPointRasterSize(int)); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + int pointId = GetPointId(); + gl_PointSize = GetPointRasterSize(pointId); + ProcessPointId(pointId); + + vec4 point0 = GetWorldToViewMatrix() * transform * vec4(points[0],1.0); + vec4 point1 = GetWorldToViewMatrix() * transform * vec4(points[1],1.0); + vec4 point2 = GetWorldToViewMatrix() * transform * vec4(points[2],1.0); + vec4 point3 = GetWorldToViewMatrix() * transform * vec4(points[4],1.0); + + //Get the indata Neye if provided. + bool isFlipped = IsFlipped(); + vec3 Neye0 = isFlipped ? -GetNormal(vec3(0),0) : GetNormal(vec3(0),0); + vec3 Neye1 = isFlipped ? -GetNormal(vec3(0),1) : GetNormal(vec3(0),1); + vec3 Neye2 = isFlipped ? -GetNormal(vec3(0),2) : GetNormal(vec3(0),2); + vec3 Neye3 = isFlipped ? 
-GetNormal(vec3(0),4) : GetNormal(vec3(0),4); + + Neye0 = GetQuadGeometryNormal(Neye0, + point0, point1, point2, point3, isFlipped); + Neye1 = GetQuadGeometryNormal(Neye1, + point0, point1, point2, point3, isFlipped); + Neye2 = GetQuadGeometryNormal(Neye2, + point0, point1, point2, point3, isFlipped); + Neye3 = GetQuadGeometryNormal(Neye3, + point0, point1, point2, point3, isFlipped); + + point0 = DisplacementTerminal(0, point0, Neye0, GetPatchCoord(0)); + point1 = DisplacementTerminal(1, point1, Neye1, GetPatchCoord(1)); + point2 = DisplacementTerminal(2, point2, Neye2, GetPatchCoord(2)); + point3 = DisplacementTerminal(3, point3, Neye3, GetPatchCoord(3)); + + vec2 coord = gl_TessCoord.xy; + vec4 basis = vec4((1.0-coord.x) * (1.0-coord.y), coord.x * (1.0-coord.y), + (1.0-coord.x) * coord.y, coord.x * coord.y); + + outData.Peye = InterpolatePrimvar(point0, point1, point3, point2, basis); + outData.Neye = InterpolatePrimvar(Neye0, Neye1, Neye3, Neye2, basis); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ptvsBarycentricCoord = gl_TessCoord; + ptvsPatchId = patch_id; + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(basis, 0, 1, 2, 4); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.TessControl.BSplineQuad + +[ + ["out", "HD_NUM_PATCH_EVAL_VERTS"], + ["out", "vec4", "tessOuterLo", "patch"], + ["out", "vec4", "tessOuterHi", "patch"], + ["in block array", "VertexData", "inpt", "gl_MaxPatchVertices", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block array", "VertexDataTess", "outpt", "HD_NUM_PATCH_EVAL_VERTS", + ["OsdPerPatchVertexBezier", "v"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.TessControl.BSplineQuad + +void main(void) +{ + vec3 cv[HD_NUM_PATCH_VERTS]; + for (int i = 0; i < HD_NUM_PATCH_VERTS; ++i) { + cv[i] = inpt[i].Peye.xyz; + } + + ivec3 patchParam = GetPatchParam(); + + OsdComputePerPatchVertexBSpline(patchParam, gl_InvocationID, cv, + outpt[gl_InvocationID].v); + + // Wait for all basis conversion to be finished + barrier(); + + if (gl_InvocationID == 0) { + vec4 tessLevelOuter = vec4(0); + vec2 tessLevelInner = vec2(0); + + // Gather bezier control points to compute limit surface tess levels + OsdPerPatchVertexBezier cpBezier[HD_NUM_PATCH_EVAL_VERTS]; + for (int i = 0; i < HD_NUM_PATCH_EVAL_VERTS; ++i) { + cpBezier[i] = outpt[i].v; + } + OsdEvalPatchBezierTessLevels(cpBezier, patchParam, + tessLevelOuter, tessLevelInner, + tessOuterLo, tessOuterHi); + + gl_TessLevelOuter[0] = tessLevelOuter[0]; + gl_TessLevelOuter[1] = tessLevelOuter[1]; + gl_TessLevelOuter[2] = tessLevelOuter[2]; + gl_TessLevelOuter[3] = tessLevelOuter[3]; + + gl_TessLevelInner[0] = tessLevelInner[0]; + gl_TessLevelInner[1] = tessLevelInner[1]; + } + + ProcessPrimvarsOut(); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.TessEval.BezierQuad + +# XXX: due to NVIDIA shader compiler bug (filed as 1687344) +# we can't put patchCoord into interface block. 
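+# Instead, the patch coord (and the tess coord) are passed as the loose
+# per-vertex outputs tesPatchCoord and tesTessCoord declared below.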
+[ + ["in", "quads"], + ["in", "vec4", "tessOuterLo", "patch"], + ["in", "vec4", "tessOuterHi", "patch"], + ["in block array", "VertexDataTess", "inpt", "gl_MaxPatchVertices", + ["OsdPerPatchVertexBezier", "v"] + ], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "tesPatchCoord"], + ["out", "vec2", "tesTessCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.TessEval.BezierQuad + +void main(void) +{ + OsdPerPatchVertexBezier cv[16]; + for (int i = 0; i < 16; ++i) { + cv[i] = inpt[i].v; + } + vec2 UV = OsdGetTessParameterization(gl_TessCoord.xy, + tessOuterLo, + tessOuterHi); + + vec3 P = vec3(0), dPu = vec3(0), dPv = vec3(0); + vec3 N = vec3(0), dNu = vec3(0), dNv = vec3(0); + + ivec3 patchParam = inpt[0].v.patchParam; + OsdEvalPatchBezier(patchParam, UV, cv, P, dPu, dPv, N, dNu, dNv); + + outData.Peye = vec4(P, 1); + outData.Neye = N; // normalized + + tesPatchCoord = OsdInterpolatePatchCoord(UV, patchParam); + tesTessCoord = UV; + + // Bilinear basis + vec4 basis = vec4( + (1.0-UV.x) * (1.0-UV.y), UV.x * (1.0-UV.y), + (1.0-UV.x) * UV.y, UV.x * UV.y ); + + ProcessPrimvarsOut(basis, 5, 6, 9, 10, UV); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.TessControl.BoxSplineTriangle + +[ + ["out", "HD_NUM_PATCH_EVAL_VERTS"], + ["out", "vec4", "tessOuterLo", "patch"], + ["out", "vec4", "tessOuterHi", "patch"], + ["in block array", "VertexData", "inpt", "gl_MaxPatchVertices", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block array", "VertexDataTess", "outpt", "HD_NUM_PATCH_EVAL_VERTS", + ["OsdPerPatchVertexBezier", "v"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.TessControl.BoxSplineTriangle + +void main(void) +{ + vec3 cv[HD_NUM_PATCH_VERTS]; + for (int i = 0; i < HD_NUM_PATCH_VERTS; ++i) { + cv[i] = inpt[i].Peye.xyz; + } + + ivec3 patchParam = GetPatchParam(); + + OsdComputePerPatchVertexBoxSplineTriangle(patchParam, gl_InvocationID, cv, + outpt[gl_InvocationID].v); + + // Wait for all basis conversion to be finished + barrier(); + + if (gl_InvocationID == 0) { + vec4 tessLevelOuter = vec4(0); + vec2 tessLevelInner = vec2(0); + + // Gather bezier control points to compute limit surface tess levels + vec3 cpBezier[HD_NUM_PATCH_EVAL_VERTS]; + for (int i = 0; i < HD_NUM_PATCH_EVAL_VERTS; ++i) { + cpBezier[i] = outpt[i].v.P; + } + OsdEvalPatchBezierTriangleTessLevels(cpBezier, patchParam, + tessLevelOuter, tessLevelInner, + tessOuterLo, tessOuterHi); + + gl_TessLevelOuter[0] = tessLevelOuter[0]; + gl_TessLevelOuter[1] = tessLevelOuter[1]; + gl_TessLevelOuter[2] = tessLevelOuter[2]; + + gl_TessLevelInner[0] = tessLevelInner[0]; + } + + ProcessPrimvarsOut(); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.TessEval.BezierTriangle + +# XXX: due to NVIDIA shader compiler bug (filed as 1687344) +# we can't put patchCoord into interface block. 
+[ + ["in", "triangles"], + ["in", "vec4", "tessOuterLo", "patch"], + ["in", "vec4", "tessOuterHi", "patch"], + ["in block array", "VertexDataTess", "inpt", "gl_MaxPatchVertices", + ["OsdPerPatchVertexBezier", "v"] + ], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "tesPatchCoord"], + ["out", "vec2", "tesTessCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.TessEval.BezierTriangle + +void main(void) +{ + OsdPerPatchVertexBezier cv[15]; + for (int i = 0; i < 15; ++i) { + cv[i] = inpt[i].v; + } + vec2 UV = OsdGetTessParameterizationTriangle(gl_TessCoord.xyz, + tessOuterLo, + tessOuterHi); + + vec3 P = vec3(0), dPu = vec3(0), dPv = vec3(0); + vec3 N = vec3(0), dNu = vec3(0), dNv = vec3(0); + + ivec3 patchParam = inpt[0].v.patchParam; + OsdEvalPatchBezierTriangle(patchParam, UV, cv, P, dPu, dPv, N, dNu, dNv); + + outData.Peye = vec4(P, 1); + outData.Neye = N; // normalized + + tesPatchCoord = OsdInterpolatePatchCoordTriangle(UV, patchParam); + tesTessCoord = UV; + + // Barycentric basis + vec4 basis = vec4( + (1.0f-UV.x-UV.y), UV.x, UV.y, 0.0f); + + ProcessPrimvarsOut(basis, 4, 5, 8, 0, UV); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessControl.BSplineQuad + +vec3 GetPosAtUv(vec2 uv, OsdPatchParam param, thread vec3 *cv) +{ + float wP[HD_NUM_PATCH_VERTS]; + float wDs[HD_NUM_PATCH_VERTS]; + float wDt[HD_NUM_PATCH_VERTS]; + float wDss[HD_NUM_PATCH_VERTS]; + float wDst[HD_NUM_PATCH_VERTS]; + float wDtt[HD_NUM_PATCH_VERTS]; + OsdEvaluatePatchBasisNormalized(OSD_PATCH_DESCRIPTOR_REGULAR, + param, uv.x, uv.y, + &(wP[0]), &(wDs[0]), &(wDt[0]), &(wDss[0]), &(wDst[0]), &(wDtt[0])); + + vec3 pos = vec3(0.0, 0.0, 0.0); + for (int i = 0; i < 16; ++i) { + pos += cv[i] * wP[i]; + } + return pos; +} + +vec4 GetPatchCoord(int index) +{ + vec2 uv[4]; + uv[0] = vec2(0, 0); + uv[1] = vec2(1, 0); + uv[2] = vec2(1, 1); + uv[3] = vec2(0, 1); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoord(uv[index], patchParam); +} + +void main(void) +{ + MAT4 transform = + GetWorldToViewMatrix() * ApplyInstanceTransform(HdGet_transform()); + + ivec3 patchParam = GetPatchParam(); + OsdPatchParam osdParam = OsdPatchParamInit(patchParam.x, patchParam.y, 0); + + const vec2 corner0Uv = vec2(0.0, 0.0); + const vec2 corner1Uv = vec2(1.0, 0.0); + const vec2 corner2Uv = vec2(1.0, 1.0); + const vec2 corner3Uv = vec2(0.0, 1.0); + + vec3 corners[4]; + corners[0] = GetPosAtUv(corner0Uv, osdParam, &(points[0])); + corners[1] = GetPosAtUv(corner1Uv, osdParam, &(points[0])); + corners[2] = GetPosAtUv(corner2Uv, osdParam, &(points[0])); + corners[3] = GetPosAtUv(corner3Uv, osdParam, &(points[0])); + + int transitionMask = OsdGetPatchTransitionMask(patchParam); + vec3 midPoints[4]; + midPoints[0] = ((transitionMask & 8) == 0) + ? float3(0) + : GetPosAtUv(float2(0.0, 0.5), osdParam, &(points[0])); + midPoints[1] = ((transitionMask & 1) == 0) + ? float3(0) + : GetPosAtUv(float2(0.5, 0.0), osdParam, &(points[0])); + midPoints[2] = ((transitionMask & 2) == 0) + ? float3(0) + : GetPosAtUv(float2(1.0, 0.5), osdParam, &(points[0])); + midPoints[3] = ((transitionMask & 4) == 0) + ? 
float3(0) + : GetPosAtUv(float2(0.5, 1.0), osdParam, &(points[0])); + + vec4 tessLevelOuter = vec4(0); + vec2 tessLevelInner = vec2(0); + vec4 tessOuterLo = vec4(0); + vec4 tessOuterHi = vec4(0); + + // Gather bezier control points to compute limit surface tess levels + Osd_GetTessLevelsFromPatchBoundaries4( + GetTessLevel(), GetProjectionMatrix(), transform, + corners, midPoints, patchParam, tessOuterLo, tessOuterHi); + + OsdComputeTessLevels(tessOuterLo, tessOuterHi, + tessLevelOuter, tessLevelInner); + + device half *tessAsHalf = (device half *)tessFactors + patch_id * 6; + + tessAsHalf[0] = half(tessLevelOuter[0]); + tessAsHalf[1] = half(tessLevelOuter[1]); + tessAsHalf[2] = half(tessLevelOuter[2]); + tessAsHalf[3] = half(tessLevelOuter[3]); + + tessAsHalf[4] = half(tessLevelInner[0]); + tessAsHalf[5] = half(tessLevelInner[1]); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.PostTessVertex.BSplineQuad + +[ + ["in", "equal_spacing"], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "tesPatchCoord"], + ["out", "vec2", "tesTessCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessVertex.BSplineQuad + +vec2 GetPatchTessUV() +{ + const ivec3 patchParam = GetPatchParam(); + + const int refinementLevel = OsdGetPatchRefinementLevel(patchParam); + const float tessLevel = min(GetTessLevel(), + (float)OSD_MAX_TESS_LEVEL) / exp2((float)refinementLevel - 1); + + vec4 tessOuterLo(0), tessOuterHi(0); + OsdGetTessLevelsUniform(tessLevel, patchParam, tessOuterLo, tessOuterHi); + + return OsdGetTessParameterization(gl_TessCoord.xy, + tessOuterLo, + tessOuterHi); +} + +vec4 GetPatchCoord(int index) +{ + return OsdInterpolatePatchCoord(GetPatchTessUV(), GetPatchParam()); +} + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + int pointId = GetPointId(); + gl_PointSize = GetPointRasterSize(pointId); + ProcessPointId(pointId); + + const ivec3 patchParam = GetPatchParam(); + const vec2 UV = GetPatchTessUV(); + + float wP[HD_NUM_PATCH_VERTS]; + float wDs[HD_NUM_PATCH_VERTS]; + float wDt[HD_NUM_PATCH_VERTS]; + float wDss[HD_NUM_PATCH_VERTS]; + float wDst[HD_NUM_PATCH_VERTS]; + float wDtt[HD_NUM_PATCH_VERTS]; + OsdPatchParam osdParam = OsdPatchParamInit(patchParam.x, patchParam.y, 0); + OsdEvaluatePatchBasisNormalized(OSD_PATCH_DESCRIPTOR_REGULAR, + osdParam, UV.x, UV.y, + &(wP[0]), &(wDs[0]), &(wDt[0]), &(wDss[0]), &(wDst[0]), &(wDtt[0])); + + vec3 P = vec3(0.0); + vec3 N = vec3(0.0); + + vec3 pDs = vec3(0.0); + vec3 pDt = vec3(0.0); + + for (int i = 0; i < 16; ++i) { + P += points[i] * wP[i]; + pDs += points[i] * wDs[i]; + pDt += points[i] * wDt[i]; + } + + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + N = normalize(cross(pDs,pDt)); + N = vec4(transpose(transformInv * GetWorldToViewInverseMatrix()) * + vec4(N,0)).xyz; + + if (length(N) > 0.0) { + N = normalize(N); + } + + tesPatchCoord = OsdInterpolatePatchCoord(UV, patchParam); + tesTessCoord = UV; + + P = (GetWorldToViewMatrix() * transform * vec4(P,1.0)).xyz; + P = (DisplacementTerminal(0, vec4(P,1.0), N, tesPatchCoord)).xyz; + + outData.Peye = vec4(P, 1); + outData.Neye = N; // normalized + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + // Bilinear basis + vec4 basis = vec4( + (1.0-UV.x) * (1.0-UV.y), UV.x * (1.0-UV.y), + (1.0-UV.x) * UV.y, UV.x * UV.y ); + + 
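+    // Indices 5, 6, 9 and 10 are the interior control points of the 4x4
+    // B-spline patch (in row-major order), i.e. the control vertices
+    // corresponding to the patch corners, so vertex and varying primvars
+    // are interpolated bilinearly from those four.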
ProcessPrimvarsOut(basis, 5, 6, 9, 10, UV); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessControl.BoxSplineTriangle + +vec3 GetPosAtUv(vec2 uv, OsdPatchParam param, thread vec3 *cv) +{ + float wP[HD_NUM_PATCH_VERTS]; + float wDs[HD_NUM_PATCH_VERTS]; + float wDt[HD_NUM_PATCH_VERTS]; + float wDss[HD_NUM_PATCH_VERTS]; + float wDst[HD_NUM_PATCH_VERTS]; + float wDtt[HD_NUM_PATCH_VERTS]; + OsdEvaluatePatchBasisNormalized(OSD_PATCH_DESCRIPTOR_LOOP, + param, uv.x, uv.y, + &(wP[0]), &(wDs[0]), &(wDt[0]), &(wDss[0]), &(wDst[0]), &(wDtt[0])); + + vec3 pos = vec3(0.0, 0.0, 0.0); + for (int i = 0; i < 12; ++i) { + pos += cv[i] * wP[i]; + } + return pos; +} + +vec4 GetPatchCoord(int index) +{ + vec2 uv[3]; + uv[0] = vec2(0, 0); // (0, 0, 1); + uv[1] = vec2(1, 0); // (1, 0, 0); + uv[2] = vec2(0, 1); // (0, 1, 0); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoordTriangle(uv[index], patchParam); +} + +void main(void) +{ + MAT4 transform = + GetWorldToViewMatrix() * ApplyInstanceTransform(HdGet_transform()); + + ivec3 patchParam = GetPatchParam(); + OsdPatchParam osdParam = OsdPatchParamInit(patchParam.x, patchParam.y, 0); + + const vec3 corner0Uv = vec3(0.0, 0.0, 1.0); + const vec3 corner1Uv = vec3(1.0, 0.0, 0.0); + const vec3 corner2Uv = vec3(0.0, 1.0, 0.0); + + vec3 corners[3]; + corners[0] = GetPosAtUv(corner0Uv.xy, osdParam, &(points[0])); + corners[1] = GetPosAtUv(corner1Uv.xy, osdParam, &(points[0])); + corners[2] = GetPosAtUv(corner2Uv.xy, osdParam, &(points[0])); + + int transitionMask = OsdGetPatchTransitionMask(patchParam); + vec3 midPoints[3]; + midPoints[0] = ((transitionMask & 4) == 0) + ? float3(0) + : GetPosAtUv(((corner2Uv + corner0Uv)/2.0).xy, osdParam, &(points[0])); + midPoints[1] = ((transitionMask & 1) == 0) + ? float3(0) + : GetPosAtUv(((corner0Uv + corner1Uv)/2.0).xy, osdParam, &(points[0])); + midPoints[2] = ((transitionMask & 2) == 0) + ? 
float3(0) + : GetPosAtUv(((corner1Uv + corner2Uv)/2.0).xy, osdParam, &(points[0])); + + vec4 tessLevelOuter = vec4(0); + vec2 tessLevelInner = vec2(0); + vec4 tessOuterLo = vec4(0); + vec4 tessOuterHi = vec4(0); + Osd_GetTessLevelsFromPatchBoundaries3( + GetTessLevel(), GetProjectionMatrix(), transform, + corners, midPoints, patchParam, tessOuterLo, tessOuterHi); + + OsdComputeTessLevelsTriangle(tessOuterLo, tessOuterHi, + tessLevelOuter, tessLevelInner); + + device half *tessAsHalf = (device half *)tessFactors + patch_id * 4; + + tessAsHalf[0] = half(tessLevelOuter[0]); + tessAsHalf[1] = half(tessLevelOuter[1]); + tessAsHalf[2] = half(tessLevelOuter[2]); + + tessAsHalf[3] = half(tessLevelInner[0]); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.PostTessVertex.BoxSplineTriangle + +[ + ["in", "equal_spacing"], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "tesPatchCoord"], + ["out", "vec2", "tesTessCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessVertex.BoxSplineTriangle + +vec2 GetPatchTessUV() +{ + const ivec3 patchParam = GetPatchParam(); + + const int refinementLevel = OsdGetPatchRefinementLevel(patchParam); + const float tessLevel = min(GetTessLevel(), + (float)OSD_MAX_TESS_LEVEL) / exp2((float)refinementLevel - 1); + + vec4 tessOuterLo(0), tessOuterHi(0); + OsdGetTessLevelsUniform(tessLevel, patchParam, tessOuterLo, tessOuterHi); + + return OsdGetTessParameterizationTriangle(gl_TessCoord.xyz, + tessOuterLo, + tessOuterHi); +} + +vec4 GetPatchCoord(int index) +{ + return OsdInterpolatePatchCoordTriangle(GetPatchTessUV(), GetPatchParam()); +} + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + int pointId = GetPointId(); + gl_PointSize = GetPointRasterSize(pointId); + ProcessPointId(pointId); + + const ivec3 patchParam = GetPatchParam(); + const vec2 UV = GetPatchTessUV(); + + float wP[HD_NUM_PATCH_VERTS]; + float wDs[HD_NUM_PATCH_VERTS]; + float wDt[HD_NUM_PATCH_VERTS]; + float wDss[HD_NUM_PATCH_VERTS]; + float wDst[HD_NUM_PATCH_VERTS]; + float wDtt[HD_NUM_PATCH_VERTS]; + OsdPatchParam osdParam = OsdPatchParamInit(patchParam.x, patchParam.y, 0); + OsdEvaluatePatchBasisNormalized(OSD_PATCH_DESCRIPTOR_LOOP, + osdParam, UV.x, UV.y, + &(wP[0]), &(wDs[0]), &(wDt[0]), &(wDss[0]), &(wDst[0]), &(wDtt[0])); + + vec3 P = vec3(0.0); + vec3 N = vec3(0.0); + + vec3 pDs = vec3(0.0); + vec3 pDt = vec3(0.0); + + for (int i = 0; i < 12; ++i) { + P += points[i] * wP[i]; + pDs += points[i] * wDs[i]; + pDt += points[i] * wDt[i]; + } + + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + N = normalize(cross(pDs,pDt)); + N = vec4(transpose(transformInv * GetWorldToViewInverseMatrix()) * + vec4(N,0)).xyz; + + if (length(N) > 0.0) { + N = normalize(N); + } + + tesPatchCoord = OsdInterpolatePatchCoordTriangle(UV, patchParam); + tesTessCoord = UV; + + P = (GetWorldToViewMatrix() * transform * vec4(P,1.0)).xyz; + P = (DisplacementTerminal(0, vec4(P,1.0), N, tesPatchCoord)).xyz; + + outData.Peye = vec4(P, 1); + outData.Neye = N; // normalized + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + // Barycentric basis + vec4 basis = vec4( + (1.0f-UV.x-UV.y), UV.x, UV.y, 0.0f); + + ProcessPrimvarsOut(basis, 4, 5, 8, 0, UV); +} + +--- -------------------------------------------------------------------------- +-- layout 
Mesh.Geometry.TriangleTess + +[ + ["in", "triangles"], + ["out", "triangle_strip"], + ["out", "3"], + ["in block array", "VertexData", "inData", "3", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["in array", "vec4", "tesPatchCoord", "3"], + ["in array", "vec2", "tesTessCoord", "3"], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "gsPatchCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Geometry.TriangleTess + +vec4 GetPatchCoord(int index) +{ + return tesPatchCoord[index]; +} + +void emit(int index, vec4 Peye, vec3 Neye) +{ + outData.Peye = Peye; + outData.Neye = Neye; + gsPatchCoord = GetPatchCoord(index); + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(index, tesTessCoord[index]); + + EmitVertex(); +} + +FORWARD_DECL(vec4 ComputeSelectionOffset()); // selection.glslfx + +void main(void) +{ + gl_PrimitiveID = gl_PrimitiveIDIn; + + bool isFlipped = IsFlipped(); // consider handedness AND negative-scale + + vec3 Neye0 = isFlipped ? -inData[0].Neye : inData[0].Neye; + Neye0 = GetNormal(Neye0, 0, tesTessCoord[0]); + Neye0 = GetTriGeometryNormal(Neye0, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + vec3 Neye1 = isFlipped ? -inData[1].Neye : inData[1].Neye; + Neye1 = GetNormal(Neye1, 0, tesTessCoord[1]); + Neye1 = GetTriGeometryNormal(Neye1, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + vec3 Neye2 = isFlipped ? -inData[2].Neye : inData[2].Neye; + Neye2 = GetNormal(Neye2, 0, tesTessCoord[2]); + Neye2 = GetTriGeometryNormal(Neye2, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + + vec4 Peye0 = DisplacementTerminal( + 0, inData[0].Peye, Neye0, GetPatchCoord(0)); + vec4 Peye1 = DisplacementTerminal( + 1, inData[1].Peye, Neye1, GetPatchCoord(1)); + vec4 Peye2 = DisplacementTerminal( + 2, inData[2].Peye, Neye2, GetPatchCoord(2)); + + // For wireframe, add a polygon offset to selected faces to ensure they + // rasterize over unselected faces. 
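+    // The offset is added to the eye-space positions before projection (see
+    // ComputeSelectionOffset in selection.glslfx), giving selected faces a
+    // small bias so they win the depth test against coincident unselected
+    // faces.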
+ vec4 selOffset = ComputeSelectionOffset(); + Peye0 += selOffset; + Peye1 += selOffset; + Peye2 += selOffset; + + // triangle 0: vertices (0,1,2) + emit(0, Peye0, Neye0); + emit(1, Peye1, Neye1); + emit(2, Peye2, Neye2); + + EndPrimitive(); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.TessEval.VaryingInterpolation + +float InterpolatePrimvar(float inPv0, float inPv1, float inPv2, float inPv3, + vec4 basis, vec2 uv) +{ + return basis[0] * inPv0 + + basis[1] * inPv1 + + basis[2] * inPv2 + + basis[3] * inPv3; +} + +vec2 InterpolatePrimvar(vec2 inPv0, vec2 inPv1, vec2 inPv2, vec2 inPv3, + vec4 basis, vec2 uv) +{ + return basis[0] * inPv0 + + basis[1] * inPv1 + + basis[2] * inPv2 + + basis[3] * inPv3; +} + +vec3 InterpolatePrimvar(vec3 inPv0, vec3 inPv1, vec3 inPv2, vec3 inPv3, + vec4 basis, vec2 uv) +{ + return basis[0] * inPv0 + + basis[1] * inPv1 + + basis[2] * inPv2 + + basis[3] * inPv3; +} + +vec4 InterpolatePrimvar(vec4 inPv0, vec4 inPv1, vec4 inPv2, vec4 inPv3, + vec4 basis, vec2 uv) +{ + return basis[0] * inPv0 + + basis[1] * inPv1 + + basis[2] * inPv2 + + basis[3] * inPv3; +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Geometry.Triangle + +[ + ["in", "triangles"], + ["out", "triangle_strip"], + ["out", "3"], + ["in block array", "VertexData", "inData", "3", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "gsPatchCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Geometry.Triangle + +vec4 GetPatchCoord(int index) +{ + vec2 uv[3]; + uv[0] = vec2(0, 0); // (0, 0, 1); + uv[1] = vec2(1, 0); // (1, 0, 0); + uv[2] = vec2(0, 1); // (0, 1, 0); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoordTriangle(uv[index], patchParam); +} + +void emit(int index, vec4 Peye, vec3 Neye) +{ + outData.Peye = Peye; + outData.Neye = Neye; + + gsPatchCoord = GetPatchCoord(index); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(index); + + EmitVertex(); +} + +FORWARD_DECL(vec4 ComputeSelectionOffset()); // selection.glslfx + +void main(void) +{ + gl_PrimitiveID = gl_PrimitiveIDIn; + + bool isFlipped = IsFlipped(); // consider handedness AND negative-scale + + vec3 Neye0 = GetNormal(inData[0].Neye, 0); + Neye0 = GetTriGeometryNormal(Neye0, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + vec3 Neye1 = GetNormal(inData[1].Neye, 1); + Neye1 = GetTriGeometryNormal(Neye1, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + vec3 Neye2 = GetNormal(inData[2].Neye, 2); + Neye2 = GetTriGeometryNormal(Neye2, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + + vec4 Peye0 = DisplacementTerminal( + 0, inData[0].Peye, Neye0, GetPatchCoord(0)); + vec4 Peye1 = DisplacementTerminal( + 1, inData[1].Peye, Neye1, GetPatchCoord(1)); + vec4 Peye2 = DisplacementTerminal( + 2, inData[2].Peye, Neye2, GetPatchCoord(2)); + + // For wireframe, add a polygon offset to selected faces to ensure they + // rasterize over unselected faces. 
+ vec4 selOffset = ComputeSelectionOffset(); + Peye0 += selOffset; + Peye1 += selOffset; + Peye2 += selOffset; + + // triangle 0: vertices (0,1,2) + emit(0, Peye0, Neye0); + emit(1, Peye1, Neye1); + emit(2, Peye2, Neye2); + + EndPrimitive(); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Geometry.TriQuad + +[ + ["in", "triangles"], + ["out", "triangle_strip"], + ["out", "3"], + ["in block array", "VertexData", "inData", "3", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Geometry.TriQuad + +vec4 GetPatchCoord(int index) +{ + vec2 uv[4]; + uv[0] = vec2(0, 0); + uv[1] = vec2(1, 0); + uv[2] = vec2(1, 1); + uv[3] = vec2(0, 1); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoord(uv[index], patchParam); +} + +void emit(int index, vec4 Peye, vec3 Neye) +{ + outData.Peye = Peye; + outData.Neye = Neye; + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(index); + + EmitVertex(); +} + +FORWARD_DECL(vec4 ComputeSelectionOffset()); // selection.glslfx + +void main(void) +{ + gl_PrimitiveID = gl_PrimitiveIDIn; + + bool isFlipped = IsFlipped(); // consider handedness AND negative-scale + + vec3 Neye0 = GetNormal(inData[0].Neye, 0); + Neye0 = GetTriGeometryNormal(Neye0, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + vec3 Neye1 = GetNormal(inData[1].Neye, 1); + Neye1 = GetTriGeometryNormal(Neye1, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + vec3 Neye2 = GetNormal(inData[2].Neye, 2); + Neye2 = GetTriGeometryNormal(Neye2, inData[0].Peye, inData[1].Peye, + inData[2].Peye, isFlipped); + + vec4 Peye0 = DisplacementTerminal( + 0, inData[0].Peye, Neye0, GetPatchCoord(0)); + vec4 Peye1 = DisplacementTerminal( + 1, inData[1].Peye, Neye1, GetPatchCoord(1)); + vec4 Peye2 = DisplacementTerminal( + 2, inData[2].Peye, Neye2, GetPatchCoord(2)); + + // For wireframe, add a polygon offset to selected faces to ensure they + // rasterize over unselected faces. 
+ vec4 selOffset = ComputeSelectionOffset(); + Peye0 += selOffset; + Peye1 += selOffset; + Peye2 += selOffset; + + // triangle 0: vertices (0,1,2) + emit(0, Peye0, Neye0); + emit(1, Peye1, Neye1); + emit(2, Peye2, Neye2); + EndPrimitive(); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Geometry.Quad + +[ + ["in", "lines_adjacency"], + ["out", "triangle_strip"], + ["out", "6"], + ["in block array", "VertexData", "inData", "4", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out block", "VertexData", "outData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ], + ["out", "vec4", "gsPatchCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Geometry.Quad + +vec4 GetPatchCoord(int index) +{ + vec2 uv[4]; + uv[0] = vec2(0, 0); + uv[1] = vec2(1, 0); + uv[2] = vec2(1, 1); + uv[3] = vec2(0, 1); + + ivec3 patchParam = GetPatchParam(); + return OsdInterpolatePatchCoord(uv[index], patchParam); +} + +void emit(int index, vec4 Peye, vec3 Neye) +{ + outData.Peye = Peye; + outData.Neye = Neye; + + gsPatchCoord = GetPatchCoord(index); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + ProcessPrimvarsOut(index); + + EmitVertex(); +} + +FORWARD_DECL(vec4 ComputeSelectionOffset()); // selection.glslfx + +void main(void) +{ + gl_PrimitiveID = gl_PrimitiveIDIn; + + bool isFlipped = IsFlipped(); // consider handedness AND negative-scale + + vec3 Neye0 = GetNormal(inData[0].Neye, 0); + Neye0 = GetQuadGeometryNormal(Neye0, inData[0].Peye, inData[1].Peye, + inData[2].Peye, inData[3].Peye, isFlipped); + vec3 Neye1 = GetNormal(inData[1].Neye, 1); + Neye1 = GetQuadGeometryNormal(Neye1, inData[0].Peye, inData[1].Peye, + inData[2].Peye, inData[3].Peye, isFlipped); + vec3 Neye2 = GetNormal(inData[2].Neye, 2); + Neye2 = GetQuadGeometryNormal(Neye2, inData[0].Peye, inData[1].Peye, + inData[2].Peye, inData[3].Peye, isFlipped); + vec3 Neye3 = GetNormal(inData[3].Neye, 3); + Neye3 = GetQuadGeometryNormal(Neye3, inData[0].Peye, inData[1].Peye, + inData[2].Peye, inData[3].Peye, isFlipped); + + vec4 Peye0 = DisplacementTerminal( + 0, inData[0].Peye, Neye0, GetPatchCoord(0)); + vec4 Peye1 = DisplacementTerminal( + 1, inData[1].Peye, Neye1, GetPatchCoord(1)); + vec4 Peye2 = DisplacementTerminal( + 2, inData[2].Peye, Neye2, GetPatchCoord(2)); + vec4 Peye3 = DisplacementTerminal( + 3, inData[3].Peye, Neye3, GetPatchCoord(3)); + + // Generate triangles (3,0,2) and (1,2,0) + // 3---2 + // | .| + // | . | + // |. | + // 0---1 + // The indices post-quadrangulation/subdivision follow the convention: + // 0 -> original (hull) vertex + // 1,3 -> edge vertices + // 2 -> center vertex + // + // By having index 2 in both the triangles, we ensure the pre-quadrangulated + // face's normal (at the center) is part of the rasterizer interpolation, + // which matters when we use smooth/limit normals. + // In the case of flat normals, we use the vertex positions, so it doesn't + // matter. + + // For wireframe, add a polygon offset to selected faces to ensure they + // rasterize over unselected faces. 
+ vec4 selOffset = ComputeSelectionOffset(); + Peye0 += selOffset; + Peye1 += selOffset; + Peye2 += selOffset; + Peye3 += selOffset; + + // triangle 0: vertices (3,0,2) + emit(3, Peye3, Neye3); + emit(0, Peye0, Neye0); + emit(2, Peye2, Neye2); + EndPrimitive(); + + // triangle 1: vertices (1,2,0) + gl_PrimitiveID = gl_PrimitiveIDIn; + emit(1, Peye1, Neye1); + emit(2, Peye2, Neye2); + emit(0, Peye0, Neye0); + EndPrimitive(); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.ControlPointBarycentric + +vec3 GetPatchControlPoint() +{ + return GetBarycentricCoord(); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Fragment.PatchCoord.ControlPointTessCoord.Triangle + +[ + ["in", "vec3", "ptvsBarycentricCoord"], + ["in", "uint", "ptvsPatchId"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.ControlPointTessCoord.Triangle + +vec3 GetPatchControlPoint() +{ + return ptvsBarycentricCoord; +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Fragment.PatchCoord.ControlPointTessCoord.Quad + +[ + ["in", "vec2", "ptvsBarycentricCoord"], + ["in", "uint", "ptvsPatchId"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.ControlPointTessCoord.Quad + +vec3 GetPatchControlPoint() +{ + return vec3(0.0, ptvsBarycentricCoord.y, ptvsBarycentricCoord.x); +} + + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessellationVertex.PatchCoord.Triangle + +vec2 GetPatchCoordLocalST() +{ + vec3 barycentric = gl_TessCoord; + return barycentric.yz; +} + +vec4 GetInterpolatedPatchCoord() +{ + return OsdInterpolatePatchCoordTriangle( + GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessellationVertex.PatchCoord.Quad + + +vec2 GetPatchCoordLocalST() +{ + return gl_TessCoord; +} + +vec4 GetInterpolatedPatchCoord() +{ + return OsdInterpolatePatchCoordTriangle( + GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.PostTessellationVertex.PatchCoord.TriQuad + +vec2 GetPatchCoordLocalST() +{ + return gl_TessCoord; +} + +vec4 GetInterpolatedPatchCoord() +{ + return OsdInterpolatePatchCoord(GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Fragment.PatchCoord + +[ + ["in", "vec4", "gsPatchCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord + +vec4 GetInterpolatedPatchCoord() +{ + return gsPatchCoord; +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Fragment.PatchCoord.Tess + +[ + ["in", "vec4", "tesPatchCoord"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.Tess + +vec4 GetInterpolatedPatchCoord() +{ + return tesPatchCoord; +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.Triangle + +vec2 GetPatchCoordLocalST() +{ + vec3 barycentric = GetPatchControlPoint(); + return barycentric.yz; +} + +vec4 GetInterpolatedPatchCoord() +{ + return 
OsdInterpolatePatchCoordTriangle( + GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.Quad + +vec2 GetPatchCoordLocalST() +{ + vec3 barycentric = GetPatchControlPoint(); + return barycentric.yz; +} + +vec4 GetInterpolatedPatchCoord() +{ + return OsdInterpolatePatchCoord(GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Fragment.PatchCoord.TriQuadPTVS + +[ + ["in", "vec2", "ptvsBarycentricCoord"], + ["in", "uint", "ptvsPatchId"] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.TriQuadPTVS + +vec2 GetPatchCoordLocalST() +{ + return ptvsBarycentricCoord; +} + +vec4 GetInterpolatedPatchCoord() +{ + return OsdInterpolatePatchCoord(GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment.PatchCoord.TriQuad + +vec2 GetPatchCoordLocalST() +{ + vec3 barycentric = GetPatchControlPoint(); + if (GetTriQuadID() == 0) { + vec2 uv[3] = { vec2(0,0), vec2(1,0), vec2(1,1) }; + return uv[0]*barycentric.x + uv[1]*barycentric.y + uv[2]*barycentric.z; + } else { + vec2 uv[3] = { vec2(1,1), vec2(0,1), vec2(0,0) }; + return uv[0]*barycentric.x + uv[1]*barycentric.y + uv[2]*barycentric.z; + } +} + +vec4 GetInterpolatedPatchCoord() +{ + return OsdInterpolatePatchCoord(GetPatchCoordLocalST(), GetPatchParam()); +} + +--- -------------------------------------------------------------------------- +-- layout Mesh.Fragment + +[ + ["in block", "VertexData", "inData", + ["vec4", "Peye"], + ["vec3", "Neye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Mesh.Fragment + +#ifndef HD_HAS_ptexFaceOffset +#define HD_HAS_ptexFaceOffset +int HdGet_ptexFaceOffset() +{ + return 0; +} +#endif + +vec4 GetPatchCoord(int localIndex) +{ + vec4 patchCoord = GetInterpolatedPatchCoord(); + return vec4(patchCoord.xyz, patchCoord.w + HdGet_ptexFaceOffset()); +} + +vec4 GetPatchCoord() +{ + return GetPatchCoord(0); +} + +vec3 ComputeScreenSpacePeye() +{ + return inData.Peye.xyz / inData.Peye.w; +} + +vec3 ComputeScreenSpaceNeye() +{ + vec3 Peye = ComputeScreenSpacePeye(); + vec3 Neye = normalize(cross(dFdx(Peye), dFdy(Peye))); + return (gl_FrontFacing ? Neye : -Neye); +} + +void main(void) +{ + bool isFlipped = IsFlipped(); + + DiscardBasedOnShading(gl_FrontFacing, isFlipped); + + DiscardBasedOnTopologicalVisibility(); + + vec4 color = vec4(0.5, 0.5, 0.5, 1); +#ifdef HD_HAS_displayColor + color.rgb = HdGet_displayColor().rgb; +#endif +#ifdef HD_HAS_displayOpacity + color.a = HdGet_displayOpacity(); +#endif + + vec3 Peye = ComputeScreenSpacePeye(); + + vec3 Neye = inData.Neye; + // Normalize Neye after rasterizer interpolation. + if (length(Neye) > 0.0) { + Neye = normalize(Neye); + } + // Give the shader key a chance to override the normal. + Neye = GetNormal(Neye, 0); + // Orient the normal for shading. 
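+    // (GetShadingNormal() is a no-op for single-sided shading; for
+    // double-sided shading it flips back-facing normals, taking negative
+    // scale into account. See MeshNormal.Fragment.* in meshNormal.glslfx.)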
+ Neye = GetShadingNormal(Neye, isFlipped); + + vec4 patchCoord = GetPatchCoord(); + color = ShadingTerminal(vec4(Peye, 1), Neye, color, patchCoord); + + color = ApplyEdgeColor(color, patchCoord); + +#ifdef HD_MATERIAL_TAG_MASKED + if (ShouldDiscardByAlpha(color)) { + discard; + } +#endif + + RenderOutput(vec4(Peye, 1), Neye, color, patchCoord); +} diff --git a/blender/lib/usd/hdSt/resources/shaders/meshFaceCull.glslfx b/blender/lib/usd/hdSt/resources/shaders/meshFaceCull.glslfx new file mode 100644 index 0000000..9f0443d --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/meshFaceCull.glslfx @@ -0,0 +1,57 @@ +-- glslfx version 0.1 + +// +// Copyright 2020 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/meshFaceCull.glslfx + + +--- -------------------------------------------------------------------------- +-- glsl MeshFaceCull.Fragment.None + +void DiscardBasedOnShading(bool frontFacing, bool isFlipped) +{ + // Nothing to do since h/w face culling is used. +} + +--- -------------------------------------------------------------------------- +-- glsl MeshFaceCull.Fragment.FrontFacing + +void DiscardBasedOnShading(bool frontFacing, bool isFlipped) +{ + if (frontFacing != isFlipped) { + discard; + } +} + +--- -------------------------------------------------------------------------- +-- glsl MeshFaceCull.Fragment.BackFacing + +void DiscardBasedOnShading(bool frontFacing, bool isFlipped) +{ + if ((!frontFacing) != isFlipped) { + discard; + } +} diff --git a/blender/lib/usd/hdSt/resources/shaders/meshNormal.glslfx b/blender/lib/usd/hdSt/resources/shaders/meshNormal.glslfx new file mode 100644 index 0000000..c6d02de --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/meshNormal.glslfx @@ -0,0 +1,218 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. 
+// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/meshNormal.glslfx + + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Scene + +vec3 GetNormal(vec3 Neye, int index) +{ + vec3 normal = vec3(0); +#if defined(HD_HAS_normals) + normal = vec3(HdGet_normals(index).xyz); +#endif + + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + normal = vec4(transpose(transformInv * GetWorldToViewInverseMatrix()) * + vec4(normal,0)).xyz; + + if (length(normal) > 0.0) + normal = normalize(normal); + return normal; +} + +vec3 GetNormal(vec3 Neye, int index, vec2 localST) +{ + return GetNormal(Neye, index); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Scene.Patches + +vec3 GetNormal(vec3 Neye, int index, vec2 localST) +{ + vec3 normal = vec3(0); +#if defined(HD_HAS_normals) + normal = vec3(HdGet_normals(index, localST).xyz); +#endif + + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + normal = vec4(transpose(transformInv * GetWorldToViewInverseMatrix()) * + vec4(normal,0)).xyz; + + if (length(normal) > 0.0) + normal = normalize(normal); + return normal; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Smooth + +vec3 GetNormal(vec3 Neye, int index) +{ + vec3 normal = vec3(0); +#if defined(HD_HAS_smoothNormals) + normal = vec3(HdGet_smoothNormals(index).xyz); +#elif defined(HD_HAS_packedSmoothNormals) + normal = vec3(HdGet_packedSmoothNormals(index).xyz); +#endif + + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + normal = vec4(transpose(transformInv * GetWorldToViewInverseMatrix()) * + vec4(normal,0)).xyz; + + if (length(normal) > 0.0) + normal = normalize(normal); + return normal; +} + +vec3 GetNormal(vec3 Neye, int index, vec2 localST) +{ + return GetNormal(Neye, index); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Flat + +vec3 GetNormal(vec3 Neye, int index) +{ + vec3 normal = vec3(0); +#if defined(HD_HAS_flatNormals) + normal = vec3(HdGet_flatNormals(index).xyz); +#elif defined(HD_HAS_packedFlatNormals) + normal = vec3(HdGet_packedFlatNormals(index).xyz); +#endif + + MAT4 transformInv = ApplyInstanceTransformInverse(HdGet_transformInverse()); + normal = vec4(transpose(transformInv * GetWorldToViewInverseMatrix()) * + vec4(normal,0)).xyz; + + if (length(normal) > 0.0) + normal = normalize(normal); + return normal; +} + +vec3 GetNormal(vec3 Neye, int index, vec2 localST) +{ + return GetNormal(Neye, index); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Fragment.ScreenSpace + +FORWARD_DECL(vec3 ComputeScreenSpaceNeye()); + +vec3 GetNormal(vec3 Neye, int index) +{ + return ComputeScreenSpaceNeye(); +} + +vec3 GetNormal(vec3 Neye, int index, vec2 localST) +{ + return GetNormal(Neye, index); +} + +--- 
-------------------------------------------------------------------------- +-- glsl MeshNormal.Pass + +vec3 GetNormal(vec3 Neye, int index) +{ + return Neye; +} + +vec3 GetNormal(vec3 Neye, int index, vec2 localST) +{ + return GetNormal(Neye, index); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Geometry.Flat + +vec3 GetTriGeometryNormal(vec3 Neye, vec4 Peye0, vec4 Peye1, vec4 Peye2, + bool isFlipped) +{ + // ignore vertex normal and compute flat facing normal + vec3 n = normalize(cross(Peye1.xyz - Peye0.xyz, Peye2.xyz - Peye0.xyz)); + return isFlipped ? -n : n; +} + +vec3 GetQuadGeometryNormal(vec3 Neye, + vec4 Peye0, vec4 Peye1, vec4 Peye2, vec4 Peye3, + bool isFlipped) +{ + // 0---3 + // |. | + // | . | + // | .| + // 1---2 + // ignore vertex normal and compute flat facing normal + // average diagonal cross products to deal with co-linear edges + vec3 A0 = Peye2.xyz - Peye3.xyz; + vec3 B0 = Peye0.xyz - Peye3.xyz; + vec3 A1 = Peye0.xyz - Peye1.xyz; + vec3 B1 = Peye2.xyz - Peye1.xyz; + vec3 n = normalize(cross(B0, A0) + cross(B1, A1)); + return isFlipped ? -n : n; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Geometry.NoFlat + +vec3 GetTriGeometryNormal(vec3 Neye, vec4 Peye0, vec4 Peye1, vec4 Peye2, + bool isFlipped) +{ + return Neye; +} + +vec3 GetQuadGeometryNormal(vec3 Neye, + vec4 Peye0, vec4 Peye1, vec4 Peye2, vec4 Peye3, + bool isFlipped) +{ + return Neye; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Fragment.SingleSided + +vec3 GetShadingNormal(vec3 N, bool isFlipped) +{ + // the fragment shader takes already-flipped-normals. + // no need to flip here. + return N; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshNormal.Fragment.DoubleSided + +vec3 GetShadingNormal(vec3 N, bool isFlipped) +{ + // note that negative scaling isn't taken into account in gl_FrontFacing + // so we have to consider isFlipped here too. + return (isFlipped != gl_FrontFacing) ? N : -N; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/meshWire.glslfx b/blender/lib/usd/hdSt/resources/shaders/meshWire.glslfx new file mode 100644 index 0000000..c657a68 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/meshWire.glslfx @@ -0,0 +1,355 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/meshWire.glslfx + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskTriangle + +vec3 GetPrimitiveEdgeMask() +{ + // A value of one in this mask hides the corresponding edge. + // (See hd/meshUtil.cpp) + return vec3(0, GetEdgeFlag() & 1, GetEdgeFlag() & 2); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskQuad + +vec3 GetPrimitiveEdgeMask() +{ + // A value of one in this mask hides the corresponding edge. + // (See hd/meshUtil.cpp) + return vec3(1, 0, GetEdgeFlag() != 0); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskPTVSTriQuad + +vec4 GetFlaggedPrimitiveEdgeDistance(vec4 edgeDistance) +{ + if (GetEdgeFlag() != 0) { + edgeDistance.yz += vec2(2.0); + } + return edgeDistance; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskTriQuad + +vec3 GetPrimitiveEdgeMask() +{ + // A value of one in this mask hides the corresponding edge. + // (See hd/meshUtil.cpp) + if (GetTriQuadID() == 0) { + return vec3(GetEdgeFlag() != 0, 1, 0); + } else { + return vec3(0, 1, GetEdgeFlag() != 0); + } +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskMetalRefinedTriQuad + +vec3 GetPrimitiveEdgeMask() +{ + return vec3(0, 1, 0); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskRefinedQuad + +vec3 GetPrimitiveEdgeMask() +{ + // Hide the common edge between the pair of rasterized triangles. + return vec3(1,0,0); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskPTVSRefinedTriQuad + +vec4 GetFlaggedPrimitiveEdgeDistance(vec4 edgeDistance) +{ + return edgeDistance; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeMaskNone + +vec3 GetPrimitiveEdgeMask() +{ + return vec3(0); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeCommon + +// Returns the distance of the current fragment (in viewport pixel units) from +// the nearest edge. +float GetMinEdgeDistance() +{ + // Hide triangle edges by adding edge mask. + vec3 param = GetEdgeCoord() + GetPrimitiveEdgeMask(); + vec3 edgeDistance = max(vec3(0.0), param / fwidth(param)); + return min(edgeDistance.x, + min(edgeDistance.y, + edgeDistance.z)); +} + +// Use edge distance to compute a smooth opacity falloff for good looking edges. +float GetEdgeFalloff(float d) { + return exp2(-4 * d * d); +} + +float GetEdgeOpacity() { + return GetEdgeFalloff(GetMinEdgeDistance()); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeRefinedTriquad + +// Returns the distance of the current fragment (in viewport pixel units) from +// the nearest edge. 
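+// Dividing the parametric edge distance by fwidth(param) converts it to an
+// approximate distance in pixels, since fwidth() estimates how much the
+// parameter changes from one pixel to the next.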
+float GetMinEdgeDistance() +{ + vec2 leftBottom = GetEdgeCoord(); + vec2 rightTop = vec2(1.0) - leftBottom; + vec4 param = vec4(leftBottom.y, rightTop.x, rightTop.y, leftBottom.x); + vec4 edgeDistance = max(vec4(0.0), param / fwidth(param)); + edgeDistance = GetFlaggedPrimitiveEdgeDistance(edgeDistance); + return min( + min(edgeDistance.x, edgeDistance.y), + min(edgeDistance.z, edgeDistance.w) + ); +} + +// Use edge distance to compute a smooth opacity falloff for good looking edges. +float GetEdgeFalloff(float d) { + return exp2(-4 * d * d); +} + +float GetEdgeOpacity() { + return GetEdgeFalloff(GetMinEdgeDistance()); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeParam + +vec3 GetEdgeParamTriangle() +{ + // Expand barycentric coordinates + vec2 param = GetPatchCoord(0).xy; + vec3 barycentric = vec3(param.x, param.y, 1 - param.x - param.y); + + // Match triangle edge order + return barycentric.yzx; +} + +vec3 GetEdgeDistanceTriangle() +{ + vec3 param = GetEdgeParamTriangle(); + return max(vec3(0.0), param / fwidth(param)); +} + +vec4 GetEdgeParamQuad() +{ + // Expand coordinates to opposite corners of quad + vec2 leftBottom = GetPatchCoord(0).xy; + vec2 rightTop = vec2(1.0) - leftBottom; + + // Match quad edge order + return vec4(leftBottom.y, rightTop.x, rightTop.y, leftBottom.x); +} + +vec4 GetEdgeDistanceQuad() +{ + vec4 param = GetEdgeParamQuad(); + return max(vec4(0.0), param / fwidth(param)); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshPatchWire.Fragment.PatchEdgeTriangle + +// Override for subdivided faces to make the boundary of the face stand out. +float GetEdgeOpacityForPatch() +{ + // Distance in pixels from triangle patch edges. + vec3 patchEdgeDistance = GetEdgeDistanceTriangle(); + + const float patchEdgeMinDistance = + min(patchEdgeDistance.x, min(patchEdgeDistance.y, patchEdgeDistance.z)); + + // Reduce the opacity of edges not on patch boundaries + if (patchEdgeMinDistance > 1.0) { + return 0.3 * GetEdgeOpacity(); + } + + // Use distance to patch edge rather than distance to primitive edge + return GetEdgeFalloff(patchEdgeMinDistance); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshPatchWire.Fragment.PatchEdgeQuad + +// Override for subdivided faces to make the boundary of the face stand out. +float GetEdgeOpacityForPatch() +{ + // Distance in pixels from quad patch edges. + vec4 patchEdgeDistance = GetEdgeDistanceQuad(); + + // Hide sub-patch internal edges introduced by quadrangulation + if (GetEdgeFlag() != 0) patchEdgeDistance.yz += vec2(2.0); + + const float patchEdgeMinDistance = + min(min(patchEdgeDistance.x, patchEdgeDistance.y), + min(patchEdgeDistance.z, patchEdgeDistance.w)); + + // Reduce the opacity of edges not on patch boundaries + if (patchEdgeMinDistance > 1.0) { + return 0.3 * GetEdgeOpacity(); + } + + // Use distance to patch edge rather than distance to primitive edge + return GetEdgeFalloff(patchEdgeMinDistance); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.NoEdge + +vec4 ApplyEdgeColor(vec4 Cfill, vec4 patchCoord) +{ + return Cfill; +} + +// Return a large value, signifying that the fragment isn't near an edge. 
+float GetMinEdgeDistance() +{ + return 1000.0; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeOnSurface + +vec4 ApplyEdgeColor(vec4 Cfill, vec4 patchCoord) +{ + float p = GetEdgeOpacity(); + + vec4 wireColor = GetWireframeColor(); + + // If wireColor is unset (zero), the fill color is just dimmed a bit. + if (all(equal(wireColor, vec4(0)))) wireColor.a = 0.5; + + vec4 Cedge = vec4(mix(Cfill.rgb, wireColor.rgb, wireColor.a), 1); + Cfill.rgb = mix(Cfill.rgb, Cedge.rgb, p); + + return Cfill; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeOnlyBlendColor + +vec4 ApplyEdgeColor(vec4 Cfill, vec4 patchCoord) +{ + float p = GetEdgeOpacity(); + if (p < 0.5) discard; + + vec4 wireColor = GetWireframeColor(); + + // If wireColor is unset (zero), ignore it altogether + + Cfill.rgb = mix(Cfill.rgb, wireColor.rgb, wireColor.a); + Cfill.a = 1.0; // edges ignore input opacity and are opaque. + + return Cfill; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeOnlyNoBlend + +vec4 ApplyEdgeColor(vec4 Cfill, vec4 patchCoord) +{ + float p = GetEdgeOpacity(); + if (p < 0.5) discard; + + Cfill.a = 1.0; // edges ignore input opacity and are opaque. + return Cfill; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeCoord.Barycentric + +vec3 GetEdgeCoord() +{ + return GetBarycentricCoord(); +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeCoord.PostTessPositionInPatch.Triangle + +vec3 GetEdgeCoord() +{ + return ptvsBarycentricCoord; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshWire.Fragment.EdgeCoord.PostTessPositionInPatch.Quad + +vec2 GetEdgeCoord() +{ + return ptvsBarycentricCoord; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshPatchWire.Fragment.EdgeOnSurface + +vec4 ApplyEdgeColor(vec4 Cfill, vec4 patchCoord) +{ + float p = GetEdgeOpacityForPatch(); + + vec4 wireColor = GetWireframeColor(); + + // If wireColor is unset (zero), the fill color is just dimmed a bit. + if (all(equal(wireColor, vec4(0)))) wireColor.a = 0.5; + + vec4 Cedge = vec4(mix(Cfill.rgb, wireColor.rgb, wireColor.a), 1); + Cfill.rgb = mix(Cfill.rgb, Cedge.rgb, p); + + return Cfill; +} + +--- -------------------------------------------------------------------------- +-- glsl MeshPatchWire.Fragment.EdgeOnly + +vec4 ApplyEdgeColor(vec4 Cfill, vec4 patchCoord) +{ + float p = GetEdgeOpacity(); + if (p < 0.5) discard; + + Cfill.a = 1.0; // edges ignore input opacity and are opaque. + + return Cfill; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/pointId.glslfx b/blender/lib/usd/hdSt/resources/shaders/pointId.glslfx new file mode 100644 index 0000000..aa9d781 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/pointId.glslfx @@ -0,0 +1,104 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. 
This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/pointId.glslfx + +#import $TOOLS/hdx/shaders/selection.glslfx + +--- -------------------------------------------------------------------------- +-- glsl PointId.Vertex.None + +int GetPointId() +{ + return -1; +} + +float GetPointRasterSize(int pointId) +{ + return GetPointSize(); +} + +void ProcessPointId(int pointId) +{ + // do nothing +} + +--- -------------------------------------------------------------------------- +-- layout PointId.Vertex.PointParam + +# Plumb the pointId, for use in the FS. +# XXX: This works only because the TES and GS stages are disabled when +# rendering as points. If they are enabled, we need to add the plumbing. +[ + ["out", "int", "vsPointId", "flat"] +] + +--- -------------------------------------------------------------------------- +-- glsl PointId.Vertex.PointParam + +// Fwd declare accessor method defined via code gen +FORWARD_DECL(int GetBaseVertexOffset()); +int GetPointId() +{ + return int(hd_VertexID) - GetBaseVertexOffset(); +} + +// Fwd declare selection decoder method defined in hdx/shaders/selection.glslfx +FORWARD_DECL(bool IsPointSelected(int)); +float GetPointRasterSize(int pointId) +{ + return IsPointSelected(pointId)? + GetPointSelectedSize() : GetPointSize(); +} + +void ProcessPointId(int pointId) +{ + vsPointId = pointId; +} + +--- -------------------------------------------------------------------------- +-- glsl PointId.Fragment.Fallback + +int GetPointId() +{ + return -1; +} + +--- -------------------------------------------------------------------------- +-- layout PointId.Fragment.PointParam + +[ + ["in", "int", "vsPointId", "flat"] +] + +--- -------------------------------------------------------------------------- +-- glsl PointId.Fragment.PointParam + +int GetPointId() +{ + return vsPointId; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/points.glslfx b/blender/lib/usd/hdSt/resources/shaders/points.glslfx new file mode 100644 index 0000000..4fca91d --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/points.glslfx @@ -0,0 +1,118 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. 
+// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/points.glslfx + +#import $TOOLS/hdSt/shaders/instancing.glslfx +#import $TOOLS/hdSt/shaders/terminals.glslfx +#import $TOOLS/hdSt/shaders/pointId.glslfx + +--- -------------------------------------------------------------------------- +-- layout Point.Vertex + +[ + ["out block", "VertexData", "outData", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Point.Vertex + +// Fwd declare methods defined in pointId.glslfx, that are used below. +FORWARD_DECL(int GetPointId()); +FORWARD_DECL(void ProcessPointId(int)); + +void main(void) +{ + MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + vec4 point = vec4(HdGet_points().xyz, 1); + + outData.Peye = vec4(GetWorldToViewMatrix() * transform * point); + + ProcessPrimvarsIn(); + + gl_Position = vec4(GetProjectionMatrix() * outData.Peye); + ApplyClipPlanes(outData.Peye); + + // check for primvar 'widths' +#if defined(HD_HAS_widths) + float pvWidth = HdGet_widths(); +#else + float pvWidth = 1.0; +#endif + // scale width following prman's behavior + vec4 w = vec4(transform * pvWidth * normalize(vec4(1,1,1,0))); + float width = length(w.xyz); + + // compute screenspace width + vec4 diameter = vec4(GetProjectionMatrix() * vec4(width, 0, outData.Peye.z, 1)); + vec2 viewportScale = GetViewport().zw * 0.5; + + gl_PointSize = clamp(viewportScale.x * diameter.x/diameter.w, + HD_GL_POINT_SIZE_MIN, HD_GL_POINT_SIZE_MAX); + + ProcessPointId(GetPointId()); +} + +--- -------------------------------------------------------------------------- +-- layout Point.Fragment + +[ + ["in block", "VertexData", "inData", + ["vec4", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Point.Fragment + +void main(void) +{ + vec3 Peye = inData.Peye.xyz / inData.Peye.w; + // camera facing. + vec3 Neye = vec3(0, 0, 1); + + vec4 color = vec4(0.5, 0.5, 0.5, 1); +#ifdef HD_HAS_displayColor + color.rgb = HdGet_displayColor().rgb; +#endif +#ifdef HD_HAS_displayOpacity + color.a = HdGet_displayOpacity(); +#endif + + vec4 patchCoord = vec4(0); + color = ShadingTerminal(vec4(Peye, 1), Neye, color, patchCoord); + +#ifdef HD_MATERIAL_TAG_MASKED + if (ShouldDiscardByAlpha(color)) { + discard; + } +#endif + + RenderOutput(vec4(Peye, 1), Neye, color, patchCoord); +} diff --git a/blender/lib/usd/hdSt/resources/shaders/ptexTexture.glslfx b/blender/lib/usd/hdSt/resources/shaders/ptexTexture.glslfx new file mode 100644 index 0000000..ca3e4bd --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/ptexTexture.glslfx @@ -0,0 +1,388 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. 
This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "ptexTextureSampler" : { + "source": [ + "PtexTextureSampler" + ] + } + } + } +} + +-- glsl PtexTextureSampler + +// ----- following code comes from OpenSubdiv/examples/common + +struct PtexPacking { + int page; + int nMipmap; + int uOffset; + int vOffset; + int adjSizeDiffs[4]; + int width; + int height; +}; + +int computeMipmapOffsetU(int w, int level) +{ + int width = 1 << w; + int m = (0x55555555 & (width | (width-1))) << (w&1); + int x = ~((1 << (w -((level-1)&~1))) - 1); + return (m & x) + ((level+1)&~1); +} + +int computeMipmapOffsetV(int h, int level) +{ + int height = 1 << h; + int m = (0x55555555 & (height-1)) << ((h+1)&1);; + int x = ~((1 << (h - (level&~1))) - 1 ); + return (m & x) + (level&~1); +} + +void evalQuadraticBSpline(float u, + REF(thread,float) B0, REF(thread,float) B1, REF(thread,float) B2, + REF(thread,float) BU0, REF(thread,float) BU1, REF(thread,float) BU2) +{ + B0 = 0.5 * (u*u - 2.0*u + 1); + B1 = 0.5 + u - u*u; + B2 = 0.5 * u*u; + + BU0 = u - 1.0; + BU1 = 1 - 2 * u; + BU2 = u; +} + +PtexPacking getPtexPacking(usampler1DArray packings, int faceID, int level) +{ + const int layoutTexelsPerFace = 3; + const int stride = textureSize(packings, 0).x; + const int index = faceID * layoutTexelsPerFace; + const int layer = index / stride; + + const ivec2 packingIndex = ivec2(index - (layer * stride), layer); + + const uvec2 page = texelFetch(packings, packingIndex+ivec2(0,0), 0).xy; + const uvec2 offsets = texelFetch(packings, packingIndex+ivec2(1,0), 0).xy; + const uvec2 sizes = texelFetch(packings, packingIndex+ivec2(2,0), 0).xy; + + PtexPacking packing; + packing.page = int(page.x); + packing.nMipmap = int(page.y); + packing.uOffset = int(offsets.x); + packing.vOffset = int(offsets.y); + + const int adjSizeDiffs = int(sizes.x); + packing.adjSizeDiffs[0] = (adjSizeDiffs >> 12) & 0xf; + packing.adjSizeDiffs[1] = (adjSizeDiffs >> 8) & 0xf; + packing.adjSizeDiffs[2] = (adjSizeDiffs >> 4) & 0xf; + packing.adjSizeDiffs[3] = (adjSizeDiffs >> 0) & 0xf; + + const int wh = int(sizes.y); + const int w = wh >> 8; + const int h = wh & 0xff; + + // clamp max level + level = min(level, packing.nMipmap); + + packing.width = 1 << (w-level); + packing.height = 1 << (h-level); + + if (level > 0) { + packing.uOffset += computeMipmapOffsetU(w, level); + packing.vOffset += computeMipmapOffsetV(h, level); + } + + return packing; +} + +// ---------------------------------------------------------------------------- +// Non-Mipmap Lookups +// ---------------------------------------------------------------------------- + +vec4 PtexLookupNearest(vec4 patchCoord, + sampler2DArray data, + usampler1DArray packings) +{ + vec2 uv = clamp(patchCoord.xy, vec2(0), vec2(1)); + int faceID = 
int(patchCoord.w); + PtexPacking ppack = getPtexPacking(packings, faceID, 0); + vec2 coords = vec2(uv.x * ppack.width + ppack.uOffset, + uv.y * ppack.height + ppack.vOffset); + return texelFetch(data, ivec3(int(coords.x), int(coords.y), ppack.page), 0); +} + +vec4 PtexLookupNearest(vec4 patchCoord, + int level, + sampler2DArray data, + usampler1DArray packings) +{ + vec2 uv = clamp(patchCoord.xy, vec2(0), vec2(1)); + int faceID = int(patchCoord.w); + PtexPacking ppack = getPtexPacking(packings, faceID, level); + vec2 coords = vec2(uv.x * ppack.width + ppack.uOffset, + uv.y * ppack.height + ppack.vOffset); + return texelFetch(data, ivec3(int(coords.x), int(coords.y), ppack.page), 0); +} + +vec4 PtexLookupFast(vec4 patchCoord, + sampler2DArray data, + usampler1DArray packings) +{ + vec2 uv = clamp(patchCoord.xy, vec2(0), vec2(1)); + int faceID = int(patchCoord.w); + PtexPacking ppack = getPtexPacking(packings, faceID, 0); + + ivec3 size = textureSize(data, 0); + vec2 coords = vec2((uv.x * ppack.width + ppack.uOffset)/size.x, + (uv.y * ppack.height + ppack.vOffset)/size.y); + return texture(data, vec3(coords.x, coords.y, ppack.page)); +} + +vec4 PtexLookupFast(vec4 patchCoord, + int level, + sampler2DArray data, + usampler1DArray packings) +{ + vec2 uv = clamp(patchCoord.xy, vec2(0), vec2(1)); + int faceID = int(patchCoord.w); + PtexPacking ppack = getPtexPacking(packings, faceID, level); + + ivec3 size = textureSize(data, 0); + vec2 coords = vec2((uv.x * ppack.width + ppack.uOffset)/size.x, + (uv.y * ppack.height + ppack.vOffset)/size.y); + return texture(data, vec3(coords.x, coords.y, ppack.page)); +} + +vec4 PtexLookup(vec4 patchCoord, + int level, + sampler2DArray data, + usampler1DArray packings) +{ + vec2 uv = clamp(patchCoord.xy, vec2(0), vec2(1)); + int faceID = int(patchCoord.w); + PtexPacking ppack = getPtexPacking(packings, faceID, level); + + vec2 coords = vec2(uv.x * ppack.width + ppack.uOffset, + uv.y * ppack.height + ppack.vOffset); + + coords -= vec2(0.5, 0.5); + + int c0X = int(floor(coords.x)); + int c1X = int(ceil(coords.x)); + int c0Y = int(floor(coords.y)); + int c1Y = int(ceil(coords.y)); + + float t = coords.x - float(c0X); + float s = coords.y - float(c0Y); + + vec4 d0 = texelFetch(data, ivec3(c0X, c0Y, ppack.page), 0); + vec4 d1 = texelFetch(data, ivec3(c0X, c1Y, ppack.page), 0); + vec4 d2 = texelFetch(data, ivec3(c1X, c0Y, ppack.page), 0); + vec4 d3 = texelFetch(data, ivec3(c1X, c1Y, ppack.page), 0); + + vec4 result = (1-t) * ((1-s)*d0 + s*d1) + t * ((1-s)*d2 + s*d3); + + return result; +} + +vec4 PtexLookupQuadratic(REF(thread, vec4) du, + REF(thread, vec4) dv, + vec4 patchCoord, + int level, + sampler2DArray data, + usampler1DArray packings) +{ + vec2 uv = clamp(patchCoord.xy, vec2(0), vec2(1)); + int faceID = int(patchCoord.w); + PtexPacking ppack = getPtexPacking(packings, faceID, level); + + vec2 coords = vec2(uv.x * ppack.width + ppack.uOffset, + uv.y * ppack.height + ppack.vOffset); + + coords -= vec2(0.5, 0.5); + + int cX = int(round(coords.x)); + int cY = int(round(coords.y)); + + float x = 0.5 - (float(cX) - coords.x); + float y = 0.5 - (float(cY) - coords.y); + + vec4 d[9]; + d[0] = texelFetch(data, ivec3(cX-1, cY-1, ppack.page), 0); + d[1] = texelFetch(data, ivec3(cX-1, cY-0, ppack.page), 0); + d[2] = texelFetch(data, ivec3(cX-1, cY+1, ppack.page), 0); + d[3] = texelFetch(data, ivec3(cX-0, cY-1, ppack.page), 0); + d[4] = texelFetch(data, ivec3(cX-0, cY-0, ppack.page), 0); + d[5] = texelFetch(data, ivec3(cX-0, cY+1, ppack.page), 0); + d[6] = 
texelFetch(data, ivec3(cX+1, cY-1, ppack.page), 0); + d[7] = texelFetch(data, ivec3(cX+1, cY-0, ppack.page), 0); + d[8] = texelFetch(data, ivec3(cX+1, cY+1, ppack.page), 0); + + float B[3], D[3]; + vec4 BUCP[3], DUCP[3]; + BUCP[0] = vec4(0); BUCP[1] = vec4(0); BUCP[2] = vec4(0); + DUCP[0] = vec4(0); DUCP[1] = vec4(0); DUCP[2] = vec4(0); + + evalQuadraticBSpline(y, B[0], B[1], B[2], D[0], D[1], D[2]); + + for (int i = 0; i < 3; ++i) { + for (int j = 0; j < 3; j++) { + vec4 A = d[i*3+j]; + BUCP[i] += A * B[j]; + DUCP[i] += A * D[j]; + } + } + + evalQuadraticBSpline(x, B[0], B[1], B[2], D[0], D[1], D[2]); + + vec4 result = vec4(0); + du = vec4(0); + dv = vec4(0); + for (int i = 0; i < 3; ++i) { + result += B[i] * BUCP[i]; + du += D[i] * BUCP[i]; + dv += B[i] * DUCP[i]; + } + + du *= ppack.width; + dv *= ppack.height; + + return result; +} + +// ---------------------------------------------------------------------------- +// MipMap Lookups +// ---------------------------------------------------------------------------- +vec4 PtexMipmapLookupNearest(vec4 patchCoord, + float level, + sampler2DArray data, + usampler1DArray packings) +{ +#if defined(SEAMLESS_MIPMAP) + // diff level + int faceID = int(patchCoord.w); + vec2 uv = patchCoord.xy; + PtexPacking packing = getPtexPacking(packings, faceID); + level += mix(mix(packing.adjSizeDiffs[0], packing.adjSizeDiffs[1], uv.x), + mix(packing.adjSizeDiffs[3], packing.adjSizeDiffs[2], uv.x), + uv.y); +#endif + + int levelm = int(floor(level)); + int levelp = int(ceil(level)); + float t = level - float(levelm); + + vec4 result = (1-t) * PtexLookupNearest(patchCoord, levelm, data, packings) + + t * PtexLookupNearest(patchCoord, levelp, data, packings); + return result; +} + + +vec4 PtexMipmapLookup(vec4 patchCoord, + float level, + sampler2DArray data, + usampler1DArray packings) +{ +#if defined(SEAMLESS_MIPMAP) + // diff level + int faceID = int(patchCoord.w); + vec2 uv = patchCoord.xy; + PtexPacking packing = getPtexPacking(packings, faceID); + level += mix(mix(packing.adjSizeDiffs[0], packing.adjSizeDiffs[1], uv.x), + mix(packing.adjSizeDiffs[3], packing.adjSizeDiffs[2], uv.x), + uv.y); +#endif + + int levelm = int(floor(level)); + int levelp = int(ceil(level)); + float t = level - float(levelm); + + vec4 result = (1-t) * PtexLookup(patchCoord, levelm, data, packings) + + t * PtexLookup(patchCoord, levelp, data, packings); + return result; +} + +vec4 PtexMipmapLookupQuadratic(REF(thread, vec4) du, + REF(thread, vec4) dv, + vec4 patchCoord, + float level, + sampler2DArray data, + usampler1DArray packings) +{ +#if defined(SEAMLESS_MIPMAP) + // diff level + int faceID = int(patchCoord.w); + vec2 uv = patchCoord.xy; + PtexPacking packing = getPtexPacking(packings, faceID); + level += mix(mix(packing.adjSizeDiffs[0], packing.adjSizeDiffs[1], uv.x), + mix(packing.adjSizeDiffs[3], packing.adjSizeDiffs[2], uv.x), + uv.y); +#endif + + int levelm = int(floor(level)); + int levelp = int(ceil(level)); + float t = level - float(levelm); + + vec4 du0, du1, dv0, dv1; + vec4 r0 = PtexLookupQuadratic(du0, dv0, patchCoord, levelm, data, packings); + vec4 r1 = PtexLookupQuadratic(du1, dv1, patchCoord, levelp, data, packings); + + vec4 result = mix(r0, r1, t); + du = mix(du0, du1, t); + dv = mix(dv0, dv1, t); + + return result; +} + +vec4 PtexMipmapLookupQuadratic(vec4 patchCoord, + float level, + sampler2DArray data, + usampler1DArray packings) +{ + vec4 du, dv; + return PtexMipmapLookupQuadratic(du, dv, patchCoord, level, data, packings); +} + +// end from 
OpenSubdiv/examples/common + +// ---------------------------------------------------------------------------- + +vec4 +PtexTextureLookup(sampler2DArray data, + usampler1DArray packings, + vec4 patchCoord) +{ + return PtexLookup(patchCoord, + /*level = */0, + data, + packings); +} + diff --git a/blender/lib/usd/hdSt/resources/shaders/renderPass.glslfx b/blender/lib/usd/hdSt/resources/shaders/renderPass.glslfx new file mode 100644 index 0000000..214fa30 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/renderPass.glslfx @@ -0,0 +1,297 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/renderPass.glslfx + +-- glsl RenderPass.Camera + +// --------------------------------------------------------------------------- +// global defines +// --------------------------------------------------------------------------- +// codeGen can override this range (currently not). +// quadro 4000's max is 189. 
+#ifndef HD_GL_POINT_SIZE_MAX +#define HD_GL_POINT_SIZE_MAX 100.0 +#endif +#ifndef HD_GL_POINT_SIZE_MIN +#define HD_GL_POINT_SIZE_MIN .45 +#endif + +// --------------------------------------------------------------------------- +// render pass states +// --------------------------------------------------------------------------- + +MAT4 GetWorldToViewMatrix() { +#if defined(HD_HAS_worldToViewMatrix) + return MAT4(HdGet_worldToViewMatrix()); +#else + return MAT4(1); +#endif +} +MAT4 GetWorldToViewInverseMatrix() { +#if defined(HD_HAS_worldToViewInverseMatrix) + return MAT4(HdGet_worldToViewInverseMatrix()); +#else + return MAT4(1); +#endif +} +MAT4 GetProjectionMatrix() { +#if defined(HD_HAS_projectionMatrix) + return MAT4(HdGet_projectionMatrix()); +#else + return MAT4(1); +#endif +} +vec3 GetPositionInWorldSpace(vec3 windowRelativeCoord) +{ +#if defined(HD_HAS_imageToWorldMatrix) + vec4 pImage = vec4(windowRelativeCoord, 1.0); + vec4 pWorld = vec4(HdGet_imageToWorldMatrix() * pImage); + return (pWorld / pWorld.w).xyz; +#else + return windowRelativeCoord; +#endif +} +float GetLightingBlendAmount() { +#if defined(HD_HAS_lightingBlendAmount) + return HdGet_lightingBlendAmount(); +#else + return 1; +#endif +} +vec4 GetViewport() { +#if defined(HD_HAS_viewport) + return HdGet_viewport(); +#else + return vec4(0,0,1,1); +#endif +} +float GetTessLevel() { +#if defined(HD_HAS_tessLevel) + return HdGet_tessLevel(); +#else + return 1; +#endif +} +float GetPointSize() { +#if defined(HD_HAS_pointSize) + return HdGet_pointSize(); +#else + return 3.0; +#endif +} +float GetPointSelectedSize() { +#if defined(HD_HAS_pointSelectedSize) + return HdGet_pointSelectedSize(); +#else + return 5.0; +#endif +} +vec4 GetWireframeColor() { +// Individual prims can specify an alternative wireframe color +// to one specified in the render pass. This is used in cases were +// there is not enough contrast with the normal one. 
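+// If neither primvar is bound this falls back to vec4(0), which the mesh
+// wire shaders treat as "no wireframe color set" (see meshWire.glslfx).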
+#if defined(HD_HAS_overrideWireframeColor)
+    return HdGet_overrideWireframeColor();
+#elif defined(HD_HAS_wireframeColor)
+    return HdGet_wireframeColor();
+#else
+    return vec4(0,0,0,0);
+#endif
+}
+vec4 GetMaskColor() {
+#if defined(HD_HAS_maskColor)
+    return HdGet_maskColor();
+#else
+    return vec4(0.5,0,0,1);
+#endif
+}
+vec4 GetIndicatorColor() {
+#if defined(HD_HAS_indicatorColor)
+    return HdGet_indicatorColor();
+#else
+    return vec4(0,0.5,0,1);
+#endif
+}
+
+bool ShouldDiscardByAlpha(vec4 color)
+{
+#if defined(HD_HAS_alphaThreshold)
+    float alphaThreshold = HdGet_alphaThreshold();
+#else
+    float alphaThreshold = 0;
+#endif
+    return (color.a < alphaThreshold);
+}
+
+vec2 ApplyAxisAlignedAffineTransform(vec4 t, vec2 pt)
+{
+    return t.xy * pt + t.zw;
+}
+
+-- glsl RenderPass.CameraFS
+
+vec2 HorizontallyNormalizedFilmbackCoordinates()
+{
+    const vec4 transform =
+#ifdef HD_HAS_imageToHorizontallyNormalizedFilmback
+        HdGet_imageToHorizontallyNormalizedFilmback();
+#else
+        vec4(vec2(1.0), vec2(0.0));
+#endif
+    return ApplyAxisAlignedAffineTransform(transform, gl_FragCoord.xy);
+}
+
+-- glsl RenderPass.ApplyClipPlanes
+
+void ApplyClipPlanes(vec4 Peye)
+{
+#if defined(HD_HAS_clipPlanes)
+#if defined(HD_NUM_clipPlanes) // more than 1 (clipPlanes[N])
+    for (int i=0; i<HD_NUM_clipPlanes; ++i) {
+        gl_ClipDistance[i] = dot(HdGet_clipPlanes(i), Peye);
+    }
+#else // only 1 (clipPlanes)
+    gl_ClipDistance[0] = dot(HdGet_clipPlanes(), Peye);
+#endif
+#endif
+}
+
+-- glsl RenderPass.RenderPick
+
+vec4 IntToVec4(int id)
+{
+    return vec4(((id >> 0) & 0xff) / 255.0,
+                ((id >> 8) & 0xff) / 255.0,
+                ((id >> 16) & 0xff) / 255.0,
+                ((id >> 24) & 0xff) / 255.0);
+}
+
+void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord)
+{
+    // Allow alpha threshold discard for ID renders regardless of material tag
+    if (ShouldDiscardByAlpha(color)) {
+        discard;
+    }
+
+    int primId = HdGet_primID();
+    primIdOut = IntToVec4(primId);
+
+    // instanceIndex is a tuple of integers (num nested levels).
+    // for picking, we store global instanceId (instanceIndex[0]) in the
+    // selection framebuffer and then reconstruct the tuple in postprocess.
+    int instanceId = GetDrawingCoord().instanceIndex[0];
+    instanceIdOut = IntToVec4(instanceId);
+}
diff --git a/blender/lib/usd/hdSt/resources/shaders/renderPassShader.glslfx b/blender/lib/usd/hdSt/resources/shaders/renderPassShader.glslfx
new file mode 100644
index 0000000..8870e70
--- /dev/null
+++ b/blender/lib/usd/hdSt/resources/shaders/renderPassShader.glslfx
@@ -0,0 +1,69 @@
+-- glslfx version 0.1
+
+//
+// Copyright 2016 Pixar
+//
+// Licensed under the Apache License, Version 2.0 (the "Apache License")
+// with the following modification; you may not use this file except in
+// compliance with the Apache License and the following modification to it:
+// Section 6. Trademarks. is deleted and replaced with:
+//
+// 6. Trademarks. This License does not grant permission to use the trade
+// names, trademarks, service marks, or product names of the Licensor
+// and its affiliates, except as required to comply with Section 4(c) of
+// the License and to reproduce the content of the NOTICE file.
+//
+// You may obtain a copy of the Apache License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the Apache License with the above modification is
+// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the Apache License for the specific
+// language governing permissions and limitations under the Apache License.
+//
+
+--- This is what an import might look like.
+--- #import $TOOLS/hdSt/shaders/renderPassShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader": { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader": { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "RenderPass.NoSelection", + "RenderPass.ApplyColorOverrides", + "RenderPass.RenderColor" ] + } + } + } +} diff --git a/blender/lib/usd/hdSt/resources/shaders/simpleLightingShader.glslfx b/blender/lib/usd/hdSt/resources/shaders/simpleLightingShader.glslfx new file mode 100644 index 0000000..bcce457 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/simpleLightingShader.glslfx @@ -0,0 +1,58 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdSt/shaders/simpleLightingShader.glslfx + +#import $TOOLS/glf/shaders/pcfShader.glslfx +#import $TOOLS/glf/shaders/simpleLighting.glslfx + +-- configuration +{ + "techniques": { + "default": { + "fragmentShader" : { + "source": [ + "PCF.ShadowFilterFragmentOnly", + "SimpleLighting.LightIntegrator", + "SimpleLighting.SimpleLighting", + "LightingOverride.SimpleLighting" + ] + } + } + } +} + +-- glsl LightingOverride.SimpleLighting + +vec3 FallbackLighting(in vec3 Peye, in vec3 Neye, in vec3 color) +{ + return simpleLightingMaterial( + vec4(color,1), + vec4(Peye,1), + Neye, + vec4(1)).rgb; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/terminals.glslfx b/blender/lib/usd/hdSt/resources/shaders/terminals.glslfx new file mode 100644 index 0000000..27b33ff --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/terminals.glslfx @@ -0,0 +1,472 @@ +-- glslfx version 0.1 + +// +// Copyright 2017 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/terminals.glslfx + +--- -------------------------------------------------------------------------- +-- glsl Geometry.CustomDisplacement + +FORWARD_DECL( + vec4 displacementShader(int index, vec4 Peye, vec3 Neye, vec4 patchCoord)); + +vec4 DisplacementTerminal(int index, vec4 Peye, vec3 Neye, vec4 patchCoord) +{ + return displacementShader(index, Peye, Neye, patchCoord); +} + +--- -------------------------------------------------------------------------- +-- glsl Geometry.NoCustomDisplacement + +vec4 DisplacementTerminal(int index, vec4 Peye, vec3 Neye, vec4 patchCoord) +{ + return Peye; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.CommonTerminals + +struct ReprStyle +{ + vec4 color; + bool usePrimvarColor; + bool usePrimvarAlpha; + bool applyColorOverride; + bool useSurfaceShaderColor; + bool useSurfaceShaderAlpha; +}; + +struct ScalarOverride +{ + bool enabled; + vec3 color; +}; + +FORWARD_DECL(ReprStyle GetReprStyle()); +FORWARD_DECL(ScalarOverride GetScalarOverride()); +FORWARD_DECL( + vec4 surfaceShader(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord)); +FORWARD_DECL(vec4 postSurfaceShader(vec4 Peye, vec3 Neye, vec4 color)); + +vec4 ShadingTerminal(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + ReprStyle reprStyle = GetReprStyle(); + + // Apply scalar override. 
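+    // When enabled, the override color replaces both the primvar color and
+    // any surface shading below; only alpha may still come from the primvar,
+    // depending on the repr style.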
+ ScalarOverride scalarOverride = GetScalarOverride(); + if (scalarOverride.enabled) { + vec4 result; + + result.rgb = scalarOverride.color; + result.a = reprStyle.usePrimvarAlpha ? color.a: reprStyle.color.a; + + vec4 colorOverride = ApplyColorOverrides(result); + result = reprStyle.applyColorOverride ? colorOverride : result; + + return result; + } + + // Draw mode can override face color + vec4 reprColor; + + reprColor.rgb = reprStyle.usePrimvarColor ? color.rgb : reprStyle.color.rgb; + reprColor.a = reprStyle.usePrimvarAlpha ? color.a : reprStyle.color.a; + + // Compute color overrides + vec4 colorOverride = ApplyColorOverrides(reprColor); + reprColor = reprStyle.applyColorOverride ? colorOverride : reprColor; + + + // Surface shading can be expensive and also can contain undesirable + // side effects (like discards). So only run it for reprs that require it. + + if (reprStyle.useSurfaceShaderColor || + reprStyle.useSurfaceShaderAlpha) { + vec4 shadingColor; + + shadingColor = surfaceShader(Peye, + Neye, + reprColor, + patchCoord); + +#ifdef HD_HAS_postSurfaceShader + shadingColor = postSurfaceShader(Peye, + Neye, + shadingColor); +#endif + + reprColor.rgb = reprStyle.useSurfaceShaderColor ? + shadingColor.rgb : + reprColor.rgb; + + reprColor.a = reprStyle.useSurfaceShaderAlpha ? + shadingColor.a : + reprColor.a; + } + + vec4 baseColor = color; + baseColor = ApplyColorOverrides(baseColor); + + vec4 litColor = mix(baseColor, + reprColor, + GetLightingBlendAmount()); + + // Final overrides. + + return ApplyColorOverridesPostLighting(litColor); +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.Surface + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsDefault(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + ReprStyle reprStyle; + + reprStyle.color = vec4(0.0, 0.0, 0.0, 1.0); + reprStyle.usePrimvarColor = true; + reprStyle.usePrimvarAlpha = true; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = true; + reprStyle.useSurfaceShaderAlpha = true; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.SurfaceUnlit + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsConstant(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + ReprStyle reprStyle; + + reprStyle.color = vec4(0.0, 0.0, 0.0, 1.0); + reprStyle.usePrimvarColor = true; + reprStyle.usePrimvarAlpha = true; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = true; + reprStyle.useSurfaceShaderAlpha = true; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.SurfaceSheer + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsConstant(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + // shade 
only every 2nd pixel in x and y + // creating a thin stippled mesh grid + float factor = step(0.5, fract((gl_FragCoord.x + 1.0) * 0.5)) + * step(0.5, fract((gl_FragCoord.y + 0.0) * 0.5)); + + // make the surface translucent so that the lines of + // the mesh edges are visible even from the back faces. + float alpha = 0.2 * (1.0 - factor); + + + ReprStyle reprStyle; + + reprStyle.color = vec4(0.0, 0.0, 0.0, alpha); + reprStyle.usePrimvarColor = true; + reprStyle.usePrimvarAlpha = false; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = true; + reprStyle.useSurfaceShaderAlpha = false; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.SurfaceOutline + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsConstant(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + ReprStyle reprStyle; + + reprStyle.color = vec4(0.0, 0.0, 0.0, 1.0); + reprStyle.usePrimvarColor = false; + reprStyle.usePrimvarAlpha = false; + reprStyle.applyColorOverride = false; + reprStyle.useSurfaceShaderColor = false; + reprStyle.useSurfaceShaderAlpha = false; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.ConstantColor + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsConstant(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + ReprStyle reprStyle; + + reprStyle.color = vec4(0.0, 0.0, 0.0, 1.0); + reprStyle.usePrimvarColor = true; + reprStyle.usePrimvarAlpha = true; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = false; + reprStyle.useSurfaceShaderAlpha = false; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.HullColor + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsConstant(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + vec4 hullColor = vec4(vec3(0.18), 1.0); + +#if defined(HD_HAS_selectedWeight) + float weight = clamp(HdGet_selectedWeight(), 0.0, 1.0); + + if (weight <= 0.0) { + discard; + } + + // The three control points of the quadratic curve for the selection color + // with the binomial coefficient premultiplied in. + // constant : 1 + // linear : 1 1 + // quadratic : 1 2 1 + vec3 c0 = vec3(0.0, 0.0, 0.0); // 1.0 * Black + vec3 c1 = vec3(2.0, 0.0, 0.0); // 2.0 * Red + vec3 c2 = vec3(1.0, 1.0, 0.0); // 1.0 * Yellow + + // de Casteljau quadratic curve interpolation + // A recursive application of lerp ('mix' in glsl) reducing the order each + // step of the recursion. 
+ // at weight = 0.0 we get c0 + // at weight = 0.5 we get c0 * .25 + c1 * 0.5 + c2 *.25 + // at weight = 1.0 we get c2 + // Thus it is a smooth curve going from c0 to c2 bending towards c1 + hullColor.rgb = mix(mix(c0, c1, weight), + mix(c1, c2, weight), weight); + +#else +#if defined(HD_HAS_hullColor) + hullColor.rgb = HdGet_hullColor(); +#endif +#if defined(HD_HAS_hullOpacity) + hullColor.a = HdGet_hullOpacity(); +#endif +#endif + + ReprStyle reprStyle; + + reprStyle.color = hullColor; + reprStyle.usePrimvarColor = false; + reprStyle.usePrimvarAlpha = false; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = false; + reprStyle.useSurfaceShaderAlpha = false; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.PointColor + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights + +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) +{ + return integrateLightsConstant(Peye, Neye, props); +} + +#endif // HD_HAS_definedIntegrateLights +#endif + +ReprStyle GetReprStyle() +{ + vec4 pointColor = vec4(vec3(0.18), 1.0); +#if defined(HD_HAS_pointColor) + pointColor = HdGet_pointColor(); +#endif + + ReprStyle reprStyle; + + reprStyle.color = pointColor; + reprStyle.usePrimvarColor = false; + reprStyle.usePrimvarAlpha = false; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = false; + reprStyle.useSurfaceShaderAlpha = false; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.PointShaded + +ReprStyle GetReprStyle() +{ + vec4 pointColor = vec4(vec3(0.18), 1.0); +#if defined(HD_HAS_pointColor) + pointColor = HdGet_pointColor(); +#endif + + ReprStyle reprStyle; + + reprStyle.color = pointColor; + reprStyle.usePrimvarColor = false; + reprStyle.usePrimvarAlpha = false; + reprStyle.applyColorOverride = true; + reprStyle.useSurfaceShaderColor = true; + reprStyle.useSurfaceShaderAlpha = true; + + return reprStyle; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.ScalarOverride + +ScalarOverride GetScalarOverride() +{ + ScalarOverride result; + +#if defined(HD_HAS_scalarOverride) + result.enabled = true; + + // Allow a scalar value to override the color from the surface + // for previewing heatmaps. This is useful for editing workflows. +#if defined(HD_HAS_scalarOverrideColorRamp) + int rampCount = constantPrimvars[GetDrawingCoord().constantCoord]. + scalarOverrideColorRamp.length(); + float scalar = HdGet_scalarOverride() * float(rampCount - 1); + float baseIndex = floor(scalar); + float nextIndex = min(float(rampCount - 1), baseIndex + 1.0); + float interp = scalar - baseIndex; + result.color = mix(HdGet_scalarOverrideColorRamp(int(baseIndex)).rgb, + HdGet_scalarOverrideColorRamp(int(nextIndex)).rgb, + interp); +#else + // If no ramp is given just gamma correct the scalar as greyscale. 
+ result.color = vec3(pow(HdGet_scalarOverride(), 2.2)); +#endif // HD_HAS_scalarOverrideColorRamp + +#else // HD_HAS_scalarOverride + result.enabled = false; + result.color = vec3(0.0, 0.0, 0.0); +#endif + + return result; +} + +--- -------------------------------------------------------------------------- +-- glsl Fragment.NoScalarOverride + +ScalarOverride GetScalarOverride() +{ + ScalarOverride result; + + result.enabled = false; + result.color = vec3(0.0, 0.0, 0.0); + + return result; +} diff --git a/blender/lib/usd/hdSt/resources/shaders/visibility.glslfx b/blender/lib/usd/hdSt/resources/shaders/visibility.glslfx new file mode 100644 index 0000000..9f10d08 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/visibility.glslfx @@ -0,0 +1,97 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/visibility.glslfx + +--- -------------------------------------------------------------------------- +-- glsl Visibility.Fragment.Fallback +void DiscardBasedOnTopologicalVisibility() +{ + // Nothing to do, since there's no authored opinion. +} + +--- -------------------------------------------------------------------------- +-- glsl Visibility.Fragment.Topology + +FORWARD_DECL(int GetElementID()); // code gen + +void GetBitmaskBufferIndices( + int id, REF(thread, int) arrayIndex, REF(thread, int) bitIndex) +{ + arrayIndex = id / 32; + bitIndex = id % 32; +} + +bool IsBitSet(uint bitmask, int bitIndex) +{ + return bool(bitmask & (1 << bitIndex)); +} + +bool IsElementVisible() +{ +#if defined(HD_HAS_elementsVisibility) + // Element (face) visibility is encoded as an array of bitmasks (uint32) + // with 1 bit per authored face. + int elementId = GetElementID(); + // When rendering a mesh as points, element id doesn't make sense. Code + // gen returns -1 as a fallback for this case. + if (elementId != -1) { + int arrayIndex, bitIndex; + GetBitmaskBufferIndices(elementId, arrayIndex, bitIndex); + uint ev = HdGet_elementsVisibility(arrayIndex); + return IsBitSet(ev, bitIndex); + } +#endif + return true; +} + +FORWARD_DECL(int GetPointId()); // pointId.glslfx + +bool IsPointVisible() +{ +#if defined(HD_HAS_pointsVisibility) + // Point visibility is encoded as an array of bitmasks (uint32) with 1 bit + // per unrefined vertex. + int pointId = GetPointId(); + // When *not* rendering a mesh as points, we return -1 for the point id. 
+ // See PointId.Fragment.Fallback + if (pointId != -1) { + int arrayIndex, bitIndex; + GetBitmaskBufferIndices(pointId, arrayIndex, bitIndex); + uint pv = HdGet_pointsVisibility(arrayIndex); + return IsBitSet(pv, bitIndex); + } +#endif + return true; +} + +void DiscardBasedOnTopologicalVisibility() +{ + if (!IsElementVisible() || !IsPointVisible()) { + discard; + } +} \ No newline at end of file diff --git a/blender/lib/usd/hdSt/resources/shaders/volume.glslfx b/blender/lib/usd/hdSt/resources/shaders/volume.glslfx new file mode 100644 index 0000000..c9d8888 --- /dev/null +++ b/blender/lib/usd/hdSt/resources/shaders/volume.glslfx @@ -0,0 +1,647 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdSt/shaders/volume.glslfx + +#import $TOOLS/hdSt/shaders/instancing.glslfx +#import $TOOLS/hdSt/shaders/pointId.glslfx + +--- -------------------------------------------------------------------------- +-- layout Volume.Vertex + +[ + ["out block", "VertexData", "outData", + # Relying on perspectively correct interpolation. + ["vec3", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Volume.Vertex + +void main(void) +{ + // Bounding box vertex in local space + const vec4 point = vec4(HdGet_points().xyz, 1); + + const MAT4 transform = ApplyInstanceTransform(HdGet_transform()); + + // Bounding box vertex in eye space. + const vec4 pointEye = vec4(GetWorldToViewMatrix() * transform * point); + + outData.Peye = pointEye.xyz / pointEye.w; + + gl_Position = vec4(GetProjectionMatrix() * pointEye); + + ProcessPrimvarsIn(); +} + +--- -------------------------------------------------------------------------- +-- layout Volume.Fragment + +[ + ["in block", "VertexData", "inData", + ["vec3", "Peye"] + ] +] + +--- -------------------------------------------------------------------------- +-- glsl Volume.Fragment + +// Quality knobs, should eventually be configurable. +// +// We also might have different values for the raymarch +// integrating the pixel value and for the raymarch doing +// the lighting computation. + +const int maxNumSteps = 10000; + +// Min transmittance (ray marching stops when underrun) +const float minTransmittance = 0.002; + +// Minimal scattering amount to ray march to light +const float minScattering = 0.002; + +// Eye space to local space.
+// Used frequently per ray-marching step in both volumeIntegrator +// and accumulatedTransmittance, so computed only once in main. +// +MAT4 instanceModelViewInverse; +// Eye space to volume bounding box space +mat4 eyeToBBox; +float resolvedStepSizeEye; +float resolvedStepSizeEyeLighting; +float resolvedStepSizeWorld; +float resolvedStepSizeWorldLighting; + +// Transform a point by a 4x4 matrix +vec3 +transformPoint(mat4 m, vec3 point) +{ + const vec4 result = vec4(m * vec4(point, 1.0)); + return result.xyz / result.w; +} + +#ifdef HD_SHADER_SUPPORTS_DOUBLE_PRECISION +vec3 +transformPoint(dmat4 m, vec3 point) +{ + const vec4 result = vec4(m * vec4(point, 1.0)); + return result.xyz / result.w; +} +#endif + +// Transform a direction by a 4x4 matrix +vec3 +transformDir(mat4 m, vec3 dir) +{ + const vec4 result = vec4(m * vec4(dir, 0.0)); + return result.xyz; +} + +#ifdef HD_SHADER_SUPPORTS_DOUBLE_PRECISION +vec3 +transformDir(dmat4 m, vec3 dir) +{ + const vec4 result = vec4(m * vec4(dir, 0.0)); + return result.xyz; +} +#endif + +// Compute time when a ray starting at pos with direction dir +// exits the axis-aligned box with vertices lMin and lMax. +// +// Assumes that dir.x isn't close to zero. +float +timeRayExitsBoxPreferX(vec3 pos, vec3 dir, vec3 lMin, vec3 lMax) +{ + // Compute the time when the ray exits the region + // R1 = [xMin, xMax] x [-inf,inf] x [-inf,inf]. + + // Depending on whether the ray is going left or right, compute + // the time when the ray is intersecting the plane containing the + // left or right face of the box. + float result = (((dir.x > 0.0) ? lMax.x : lMin.x) - pos.x) / dir.x; + + // Compute the time when the ray exits the region + // R2 = [xMin, xMax] x [yMin,yMax] x [-inf,inf]. + + // We can compute the intersection where the ray left R1 as + // pos + dir * result. + // If this intersection is above or below the box, we know that there + // is an earlier intersection of the ray with the plane containing + // the top or bottom face of the box, compute that intersection time. + const float y = pos.y + dir.y * result; + if (y < lMin.y) { + result = (lMin.y - pos.y) / dir.y; + } + if (y > lMax.y) { + result = (lMax.y - pos.y) / dir.y; + } + + // Compute the time when the ray exits the region + // R3 = [xMin, xMax] x [yMin,yMax] x [zMin,zMax]. + + // Analogous procedure to above. + const float z = pos.z + dir.z * result; + if (z < lMin.z) { + result = (lMin.z - pos.z) / dir.z; + } + if (z > lMax.z) { + result = (lMax.z - pos.z) / dir.z; + } + + return result; +} + +// Compute time when a ray starting at pos with direction dir +// exits the axis-aligned box with vertices lMin and lMax. +// +// Assumes that dir is normalized. +float +timeRayExitsBox(vec3 pos, vec3 dir, vec3 lMin, vec3 lMax) +{ + // Uses timeRayExitsBoxPreferX after permuting the coordinates + // to make sure that x is not close to zero. + // + // Note that because dir has unit length, at least one of its entries + // has absolute value larger than 1/2 ( (1/2)^2 + (1/2)^2 + (1/2)^2 < 1^2). + + const vec3 abs_dir = abs(dir); + if (abs_dir.x > 0.5) { + return timeRayExitsBoxPreferX( + pos , dir , lMin , lMax ); + } + if (abs_dir.y > 0.5) { + return timeRayExitsBoxPreferX( + pos.yzx, dir.yzx, lMin.yzx, lMax.yzx); + } + + return timeRayExitsBoxPreferX( + pos.zxy, dir.zxy, lMin.zxy, lMax.zxy); + } + +// Given a ray in eye space starting inside a volume, compute the time when it +// exits the volume (assuming rayDirectionEye is normalized).
+float +timeRayExitsVolume(vec3 rayStartEye, vec3 rayDirectionEye) +{ + // Transform ray to volume bounding box space + const vec3 rayStartBBox = transformPoint(eyeToBBox, rayStartEye); + const vec3 rayDirectionBBox = transformDir (eyeToBBox, rayDirectionEye); + + // Compute when ray is leaving the volume bounding box + return timeRayExitsBox(rayStartBBox, + rayDirectionBBox, + vec3(HdGet_volumeBBoxLocalMin().xyz), + vec3(HdGet_volumeBBoxLocalMax().xyz)); + +} + +// Given a ray in eye space, compute the time when it entered the volume +// (assuming rayDirectionEye is normalized). +// Note that it is assumed that the ray point is in the volume and that the +// result will be negative. +float +timeRayEnteredVolume(vec3 rayEndEye, vec3 rayDirectionEye) +{ + // Compute when reversed ray is exiting the volume bounding box + return - timeRayExitsVolume(rayEndEye, -rayDirectionEye); +} + +vec3 +coordsToLocalVolumeSpace(vec3 coords) +{ + return transformPoint(instanceModelViewInverse, coords); +} + +#if NUM_LIGHTS == 0 + +vec3 +lightingComputation(vec3 rayPointEye, vec3 rayDirectionEye) +{ + return vec3(0.0); +} + +#else + +// Compute the transmittance of the volume from Peye to a +// light source in the given direction rayDirectionEye. +// This integrates the density from Peye to the boundary of +// the volume. The assumption is that the light source is +// out of the volume. +float +accumulatedTransmittance(vec3 rayStartEye, vec3 rayDirectionEye) +{ + int i = 1; + + float totalExtinction = 0.0; + + const vec3 rayStepEye = resolvedStepSizeEyeLighting * rayDirectionEye; + + const float rayLength = timeRayExitsVolume(rayStartEye, rayDirectionEye); + + const int numSteps = + int(floor(min(float(maxNumSteps), + rayLength / resolvedStepSizeEyeLighting))); + + while(i < numSteps) { + const vec3 rayPointEye = rayStartEye + i * rayStepEye; + + totalExtinction += extinctionFunction(rayPointEye); + + i+=1; + } + + return exp(-totalExtinction * resolvedStepSizeWorldLighting); +} + +// Computes the amount of light arriving at point Peye, +// taking attenuation (e.g., by the inverse-square law), shadows, +// and transmittance by the volume into account. +vec3 +lightingComputation(vec3 rayPointEye, vec3 rayDirectionEye) +{ + vec3 result = vec3(0.0); + for (int i = 0; i < NUM_LIGHTS; ++i) { + LightSource light = GetLightSource(i); + + const vec4 Plight = light.position; + + const vec3 lightDirectionEye = normalize( + (Plight.w == 0.0) ? Plight.xyz : Plight.xyz - rayPointEye); + + const float atten = + lightDistanceAttenuation(vec4(rayPointEye,1), i) * + lightSpotAttenuation(lightDirectionEye, i); + +// For now, not using shadows for volumes. +#if USE_SHADOWS && 0 + const float shadow = light.hasShadow ? + shadowing(/*lightIndex=*/i, rayPointEye) : 1.0; +#else + const float shadow = 1.0; +#endif + + if (shadow > 0.0001) { + result += + shadow * + atten * + // Assuming that light source is outside of volume's + // bounding box (might integrate extinction along ray + // beyond light source). + accumulatedTransmittance(rayPointEye, lightDirectionEye) * + phaseFunction(-rayDirectionEye, lightDirectionEye) * + light.diffuse.rgb; + } + } + + return result; +} + +#endif + +// Result of integrating volume along a ray +struct VolumeContribution +{ + // Coordinates where ray marching hit the first non-empty voxel + // in eye space. 0 indicates the ray hit only empty voxels.
+ vec3 firstHitPeye; + + // Integrated color + vec3 color; + + // Integrated transmittance, i.e., what fraction of light from + // geometry behind the volume is still visible. + float transmittance; +}; + +VolumeContribution +volumeIntegrator(vec3 rayStartEye, vec3 rayDirectionEye, float rayLength) +{ + int i = 1; + + VolumeContribution result; + result.firstHitPeye = vec3(0.0); + result.color = vec3(0.0); + result.transmittance = 1.0; + + const vec3 rayStepEye = resolvedStepSizeEye * rayDirectionEye; + + const int numSteps = + int(floor(min(float(maxNumSteps), rayLength / resolvedStepSizeEye))); + + // integrate transmittance and light along ray for bounding box + while(i < numSteps) { + const vec3 rayPointEye = rayStartEye + i * rayStepEye; + + // Evaluate volume shader functions to determine extinction, + // scattering, and emission. + const float extinctionValue = extinctionFunction(rayPointEye); + const float scatteringValue = scatteringFunction(rayPointEye); + const vec3 emissionValue = emissionFunction(rayPointEye); + + // If this is the first time the ray is hitting a non-empty voxel, + // record the coordinates. + if (all(equal(result.firstHitPeye, vec3(0.0)))) { + if ( extinctionValue > 0 || + scatteringValue > 0 || + any(greaterThan(emissionValue, vec3(0)))) { + result.firstHitPeye = rayPointEye; + } + } + + // In scattering contribution, lighting only computed if scattering + // is non-trivial. + const vec3 inScattering = + (resolvedStepSizeWorld * scatteringValue >= minScattering) ? + (scatteringValue * + lightingComputation(rayPointEye, rayDirectionEye)) + : vec3(0.0); + + // In scattering and emission contribution + result.color += + (resolvedStepSizeWorld * result.transmittance) * + (inScattering + emissionValue); + + // Update transmittance + result.transmittance *= exp(-extinctionValue * resolvedStepSizeWorld); + + // Stop when the volume has become close to opaque. + if (result.transmittance < minTransmittance) { + break; + } + + i+=1; + } + + return result; +} + +// Is camera orthographic? +bool +isCameraOrthographic() +{ + return abs(GetProjectionMatrix()[3][3] - 1.0) < 1e-5; +} + +// Convert depth value z in [-1,1] to depth in eye space [-near, -far]. +float +NDCtoEyeZ(float z) +{ + const MAT4 m = inverse(GetProjectionMatrix()); + return float((m[2][2] * z + m[3][2]) / (m[2][3] * z + m[3][3])); +} + +// Compute the z-value of the near clipping plane in eye space. +float +computeNearZ() +{ +#ifdef HD_MINUS_ONE_TO_ONE_DEPTH_RANGE + return NDCtoEyeZ(-1.0); +#else + return NDCtoEyeZ(0.0); +#endif +} + +// Compute the near clipping distance. Always returns a positive value. +float +computeNearDistance() +{ + return abs(computeNearZ()); +} + +// Consider the ray from the eye to a given point in eye space. +// Computes the direction of this ray in both cases where the +// camera is orthographic or perspective. +vec3 +computeRayDirectionEye(vec3 rayPointEye) +{ + // In NDC space, the ray is always pointing into the z-direction (0,0,1). + // In clip space, this corresponds to (0,0,1,0). + // We need to multiply (0,0,1,0) by the inverse projection matrix to + // get to homogeneous eye space. 
+ // Or alternatively, we can get the direction in homogeneous eye space + // by taking the respective column of the inverse projection matrix: + const vec4 dir = vec4(inverse(GetProjectionMatrix())[2]); + + // To compute the corresponding direction in non-homogeneous eye space, + // compute the position of the ray after time dt << 1: + // vec4 pHomogeneous = vec4(rayPointEye, 1.0) + dt * dir; + // vec3 p = pHomogeneous.xyz / pHomogeneous.w; + // + // Or equivalently: + // vec3 p = (rayPointEye + dt * dir.xyz) / (1.0 + dir.w * dt); + // And since dt << 1, we have + // vec3 p = (rayPointEye + dt * dir.xyz) * (1.0 - dir.w * dt); + // And dropping higher order terms: + // vec3 p = rayPointEye + dt * (dir.xyz - rayPointEye * dir.w); + // So the new direction is given by: + // vec3 d = dir.xyz - rayPointEye * dir.w; + + // Normalize direction in eye space. + return normalize(dir.xyz - rayPointEye * dir.w); +} + +// Given where the ray is about to leave the volume, compute where we +// should start ray marching: this is either the point where the ray +// would have entered the volume or the intersection with the near +// clipping plane or a sphere about the eye (in the perspective case). +// +vec3 +computeRayStartEye(vec3 rayEndEye, vec3 rayDirectionEye) +{ + // Time where ray would have entered volume (negative). + const float startTime = timeRayEnteredVolume(rayEndEye, rayDirectionEye); + + if (isCameraOrthographic()) { + // Time where ray would have intersected near plane + const float nearTime = + (computeNearZ() - rayEndEye.z) + / rayDirectionEye.z; + // Take the later of the two times for getting the start point + return rayEndEye + max(startTime, nearTime) * rayDirectionEye; + } + + // Note that we intersect the ray with sphere about the eye with + // radius equal to the near distance in the perspective case rather + // than just the above intersection with the near plane. + // + // The motivation is that the distance between the eye and the + // near plane is non-constant across the image. Thus, ray-marching + // would skip more volume away from the center of the image making + // the image darker there - so we see opposite vignetting. To + // avoid this bias, we use a sphere about the eye. + // + // Note that we can use points in front of the near plane + // since OIT resolution makes no assumptions about the + // depth value. + // + + // Compute point where ray would have entered volume + const vec3 rayStartEye = rayEndEye + startTime * rayDirectionEye; + // If this point is behind the eye or in the sphere about the eye, ... + if (rayStartEye.z > 0.0 || length(rayStartEye) < computeNearDistance()) { + // ... use point on sphere. + return normalize(rayDirectionEye) * computeNearDistance(); + } + + return rayStartEye; +} + +// The depth at which we hit opaque geometry in eye space (negative +// value by OpenGL convention). +float +sampleZBuffer(vec2 fragcoord) +{ +#ifdef HD_HAS_depthReadback + // Sample the z-Buffer at the frag coordinate. + const float bufferVal = texelFetch(HdGetSampler_depthReadback(), + ivec2(fragcoord), + /* lod = */ 0).x; +#else + // Assume far-plane if we cannot sample the z-Buffer. + const float bufferVal = 1.0; +#endif + +#ifdef HD_MINUS_ONE_TO_ONE_DEPTH_RANGE + // Convert from [0, 1] to [-1, 1] if necessary + return NDCtoEyeZ(2.0 * bufferVal - 1.0); +#else + return NDCtoEyeZ(bufferVal); +#endif +} + +// Compute how much length we need to ray march. +// +// The ray is encoded through its start point and direction.
Its end will be +// determined from two things: +// - the eye space coordinates of this fragment which is part of the back-faces +// of the volume. +// - the z-Value of the opaque geometry (since we want to stop raymarching +// once the ray has hit opaque geometry) +float +computeRayLength(vec3 rayStartEye, vec3 rayDirectionEye, vec3 rayEndEye, + float opaqueZ) +{ + // Recall that the camera is looking down the minus z-direction by + // OpenGL conventions so we need to take the max to get the closer + // point. + const float rayEndZ = max(opaqueZ, rayEndEye.z); + return (rayEndZ - rayStartEye.z) / rayDirectionEye.z; +} + +float max3(float a, float b, float c) +{ + return max(a, max(b, c)); +} + +// Computes the inverse of the scaling of an affine transform. +// Approximately - since most transforms have uniform scaling +// and no shear, this is fine. +float +scaleOfMatrix(MAT4 m) +{ + // Take the maximum of the lengths of the images of the x, y + // and z vector. + // + // A more general, coordinate independent implementation would take + // the minimum singular value from the singular value decomposition. + // + const mat3 affinePart = mat3(m[0][0], m[0][1], m[0][2], + m[1][0], m[1][1], m[1][2], + m[2][0], m[2][1], m[2][2]); + return max3(length(affinePart[0]), + length(affinePart[1]), + length(affinePart[2])); +} + +#ifdef HD_HAS_integrateLights +#ifndef HD_HAS_definedIntegrateLights +#define HD_HAS_definedIntegrateLights +LightingContribution +integrateLights(vec4 Peye, vec3 Neye, LightingInterfaceProperties props) { + return integrateLightsDefault(Peye, Neye, props); +} +#endif +#endif + +void main(void) +{ + instanceModelViewInverse = + ApplyInstanceTransformInverse(HdGet_transformInverse()) * + GetWorldToViewInverseMatrix(); + + eyeToBBox = mat4( + MAT4(HdGet_volumeBBoxInverseTransform()) * instanceModelViewInverse); + + ProcessSamplingTransforms(instanceModelViewInverse); + + const float halfWorldSampleDistance = + 0.5 * float(HdGet_sampleDistance()) + / scaleOfMatrix(instanceModelViewInverse); + + const float viewScale = scaleOfMatrix(GetWorldToViewInverseMatrix()); + + resolvedStepSizeEye = + HdGet_stepSize() * halfWorldSampleDistance; + resolvedStepSizeWorld = + viewScale * resolvedStepSizeEye; + resolvedStepSizeEyeLighting = + HdGet_stepSizeLighting() * halfWorldSampleDistance; + resolvedStepSizeWorldLighting = + viewScale * resolvedStepSizeEyeLighting; + + // Discard front faces - ray marching stops at fragment eye position + // and starts at the intersection of ray with volume bounding box or + // near plane. + if (gl_FrontFacing != (determinant(instanceModelViewInverse) < 0.0)) { + discard; + } + + // camera facing. 
+ const vec3 Neye = vec3(0, 0, 1); + + // compute ray for ray marching + const vec3 rayDirectionEye = computeRayDirectionEye(inData.Peye); + const vec3 rayStartEye = computeRayStartEye(inData.Peye, rayDirectionEye); + + // Use z-value from depth buffer to compute length for ray marching + const float opaqueZ = sampleZBuffer(gl_FragCoord.xy); + const float rayLength = computeRayLength( + rayStartEye, rayDirectionEye, inData.Peye, opaqueZ); + + const VolumeContribution volumeContribution = + volumeIntegrator(rayStartEye, rayDirectionEye, rayLength); + const float alpha = 1 - volumeContribution.transmittance; + const vec4 color = ApplyColorOverrides(vec4(volumeContribution.color, alpha)); + + const vec4 patchCoord = vec4(0.0); + + RenderOutput(vec4(volumeContribution.firstHitPeye, 1), + Neye, color, patchCoord); +} diff --git a/blender/lib/usd/hdSt/resources/textures/fallbackBlackDomeLight.png b/blender/lib/usd/hdSt/resources/textures/fallbackBlackDomeLight.png new file mode 100644 index 0000000..d4c45e5 Binary files /dev/null and b/blender/lib/usd/hdSt/resources/textures/fallbackBlackDomeLight.png differ diff --git a/blender/lib/usd/hdx/resources/plugInfo.json b/blender/lib/usd/hdx/resources/plugInfo.json new file mode 100644 index 0000000..70089c6 --- /dev/null +++ b/blender/lib/usd/hdx/resources/plugInfo.json @@ -0,0 +1,14 @@ +{ + "Plugins": [ + { + "Info": { + "ShaderResources": "shaders" + }, + "LibraryPath": "", + "Name": "hdx", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/hdx/resources/shaders/boundingBox.glslfx b/blender/lib/usd/hdx/resources/shaders/boundingBox.glslfx new file mode 100644 index 0000000..88ec135 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/boundingBox.glslfx @@ -0,0 +1,69 @@ +-- glslfx version 0.1 + +// +// Copyright 2022 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "BoundingBoxVertex": { + "source": [ "BoundingBox.Vertex" ] + }, + "BoundingBoxFragment": { + "source": [ "BoundingBox.Fragment" ] + } + } + } +} + +-- glsl BoundingBox.Vertex + +void main(void) +{ + const vec4 clipPos = worldViewProj[hd_InstanceID] * vec4(position,1.0); + gl_Position = clipPos; + + // Write out a flat (non-interpolated) screen space position for the + // provoking vertex for a line so we can measure how far a fragment + // is from that vertex in screen space. 
+ dashStart = (clipPos.xy/clipPos.w+vec2(1.0))*(viewport.zw/2.0)+viewport.xy; +} + +-- glsl BoundingBox.Fragment + +void main(void) +{ + // Skip any pattern styling if the uniform/constant dashSize is invalid. + if (dashSize != 0.0) { + // Otherwise create a dashed pattern with equal solid and blank pixel + // sections for the line. + const float pixelDist = distance(dashStart, gl_FragCoord.xy); + if (mod(pixelDist, 2.0*dashSize) > dashSize) { + discard; + } + } + + hd_FragColor = color; +} diff --git a/blender/lib/usd/hdx/resources/shaders/colorChannel.glslfx b/blender/lib/usd/hdx/resources/shaders/colorChannel.glslfx new file mode 100644 index 0000000..7c22fbb --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/colorChannel.glslfx @@ -0,0 +1,70 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "ColorChannelFragment": { + "source": [ "ColorChannel.Fragment" ] + } + } + } +} + +-- glsl ColorChannel.Fragment + + +// Display channel values. These should match the the indices of the entries in +// HdxDisplayChannelTokens +#define CHANNEL_COLOR 0 +#define CHANNEL_RED 1 +#define CHANNEL_GREEN 2 +#define CHANNEL_BLUE 3 +#define CHANNEL_ALPHA 4 +#define CHANNEL_LUMINANCE 5 + +void main(void) +{ + vec2 fragCoord = uvOut * screenSize; + vec4 color = HgiTexelFetch_colorIn(ivec2(fragCoord)); + + // Display Channel + if (channel == CHANNEL_RED) { + color.g = color.b = color.r; + } else if (channel == CHANNEL_GREEN) { + color.r = color.b = color.g; + } else if (channel == CHANNEL_BLUE) { + color.r = color.g = color.b; + } else if (channel == CHANNEL_ALPHA) { + color.r = color.g = color.b = color.a; + } else if (channel == CHANNEL_LUMINANCE) { + const vec3 W = vec3(0.30, 0.59, 0.11); + color.r = color.g = color.b = dot(color.rgb, W); + } // Do nothing if channel == CHANNEL_COLOR + + hd_FragColor = color; +} diff --git a/blender/lib/usd/hdx/resources/shaders/colorCorrection.glslfx b/blender/lib/usd/hdx/resources/shaders/colorCorrection.glslfx new file mode 100644 index 0000000..0c1fc64 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/colorCorrection.glslfx @@ -0,0 +1,74 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. 
is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "ColorCorrectionVertex": { + "source": [ "ColorCorrection.Vertex" ] + }, + "ColorCorrectionFragment": { + "source": [ "ColorCorrection.Fragment" ] + } + } + } +} + +-- glsl ColorCorrection.Vertex + +void main(void) +{ + gl_Position = position; + uvOut = uvIn; +} + +-- glsl ColorCorrection.Fragment + +// Similar to D3DX_DXGIFormatConvert.inl, but branchless +// https://www.shadertoy.com/view/wds3zM +vec3 FloatToSRGB(vec3 val) +{ + val = mix((val * 12.92), + (1.055 * pow(val, vec3(1.0/2.4)) - 0.055), + step(0.0031308, val)); + return val; +} + +void main(void) +{ + vec2 fragCoord = uvOut * screenSize; + vec4 inCol = HgiTexelFetch_colorIn(ivec2(fragCoord)); + + #if defined(GLSLFX_USE_OCIO) + inCol = OCIO_DISPLAY_FUNC(inCol); + #else + // Only color, not alpha is gamma corrected! + inCol.rgb = FloatToSRGB(inCol.rgb); + #endif + + hd_FragColor = inCol; +} diff --git a/blender/lib/usd/hdx/resources/shaders/fullscreen.glslfx b/blender/lib/usd/hdx/resources/shaders/fullscreen.glslfx new file mode 100644 index 0000000..b67a263 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/fullscreen.glslfx @@ -0,0 +1,66 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// + +-- configuration +{ + "techniques": { + "default": { + "FullscreenVertex": { + "source": [ "Fullscreen.Vertex" ] + }, + "CompositeFragmentNoDepth": { + "source": [ "Composite.FragmentNoDepth" ] + }, + "CompositeFragmentWithDepth": { + "source": [ "Composite.FragmentWithDepth" ] + } + } + } +} + +-- glsl Fullscreen.Vertex + +void main(void) +{ + gl_Position = position; + uvOut = uvIn; +} + +-- glsl Composite.FragmentNoDepth + +void main(void) +{ + hd_FragColor = vec4(HgiGet_colorIn(uvOut)); +} + +-- glsl Composite.FragmentWithDepth + +void main(void) +{ + float depth = HdGet_depth(uvOut).x; + hd_FragColor = vec4(HgiGet_colorIn(uvOut)); + hd_FragDepth = depth; +} diff --git a/blender/lib/usd/hdx/resources/shaders/oitResolveImageShader.glslfx b/blender/lib/usd/hdx/resources/shaders/oitResolveImageShader.glslfx new file mode 100644 index 0000000..fd77e09 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/oitResolveImageShader.glslfx @@ -0,0 +1,110 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "fragmentShader": { + "source": [ "OitResolve.Image" ] + } + } + } +} + +--- -------------------------------------------------------------------------- +-- layout OitResolve.Image + +[ + ["buffer readWrite", "CounterBuffer", "hdxOitCounterBuffer", + ["atomic_int", "hdxOitCounterBuffer"] + ] +] + +-- glsl OitResolve.Image + +vec4 imageShader(vec2 uv) +{ +#if defined(HD_HAS_hdxOitDataBuffer) + const int screenWidth = int(HdGet_oitScreenSize().x); + const int screenHeight = int(HdGet_oitScreenSize().y); + + // Must match the per-pixel sample count used when creating the OIT buffers. + // (See HdxOitResolveTask::_PrepareOitBuffers) + const int maxSamples = 8; + + const int dataBufferSize = screenWidth * screenHeight * maxSamples; + + // +1 because index 0 of counter buffer is reserved as atomic counter in + // WriteOitLayersToBuffer + int screenIndex = int(gl_FragCoord.x) + int(gl_FragCoord.y) * screenWidth; + screenIndex += 1; + + int nodeIndex = ATOMIC_LOAD(hdxOitCounterBuffer[screenIndex]); + int numDepths = 0; + + // XXX renderPass.WriteOitLayersToBuffer does not clamp the number of + // depth samples we store for a pixel. Here we process no more than + // 'maxSamples' for a pixel. 
(If there are greater than 'maxSamples' + // samples stored for this pixel some will currently not contribute) + vec4 sortedColor[maxSamples]; + float sortedDepth[maxSamples]; + + while (nodeIndex != -1 && + numDepths < maxSamples && + nodeIndex < dataBufferSize) + { + float currentDepth = hdxOitDepthBuffer[nodeIndex]; + int insertIndex = numDepths; + // Recall that depths are in eye space, so inequality is flipped. + while (insertIndex > 0 && sortedDepth[insertIndex - 1] < currentDepth) { + sortedDepth[insertIndex] = sortedDepth[insertIndex - 1]; + sortedColor[insertIndex] = sortedColor[insertIndex - 1]; + insertIndex -= 1; + } + sortedColor[insertIndex] = hdxOitDataBuffer[nodeIndex]; + sortedDepth[insertIndex] = hdxOitDepthBuffer[nodeIndex]; + numDepths += 1; + nodeIndex = hdxOitIndexBuffer[nodeIndex]; + } + + // Assume color in sortedColor is pre-multiplied by alpha + int depth = 0; + vec4 colorAccum = vec4(0); + while (depth < numDepths) { + colorAccum += sortedColor[depth] * (1 - colorAccum.a); + + if (colorAccum.a >= 1.0) break; + + depth += 1; + } + + colorAccum = clamp(colorAccum, vec4(0), vec4(1)); + return colorAccum; +#else + return vec4(0); +#endif +} diff --git a/blender/lib/usd/hdx/resources/shaders/outline.glslfx b/blender/lib/usd/hdx/resources/shaders/outline.glslfx new file mode 100644 index 0000000..613ee8c --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/outline.glslfx @@ -0,0 +1,155 @@ +-- glslfx version 0.1 + +// +// Copyright 2020 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "OutlineFragment": { + "source": [ "Outline.Fragment" ] + } + } + } +} + +-- glsl Outline.Fragment + +#define EPSILON 0.0001 +#define LARGE 100000 + +// The color texture with the highlighted areas. + +// Given a delta x and y in relation to the current texel coordinates, check if +// the colorIn texture has a non-black texel and its distance^2 to the current +// texel is smaller than the one passed as dist2. The color of that texel will +// be returned in the color output var. +void testTexel(int x, int y, REF(thread,float) dist2, REF(thread,vec4) color) +{ + vec2 texCoords = uvOut + vec2(x, y) * texelSize; + vec4 c = HgiGet_colorIn(texCoords); + + if ( c.r > EPSILON || c.g > EPSILON || c.b > EPSILON) { + float d = float(x * x + y * y ); + if (d < dist2) { + dist2 = d; + color = c; + } + } +} + +// Used by testOctants() to scan each horizontal line in the circle. 
+// Returns the distance^2 and color of the non-black texel that is closest to +// the current texel. +void testHLine(int x0, int x1, int y, REF(thread,float) dist2, REF(thread,vec4) color) { + for (int x = x0; x < x1; x++) { + testTexel(x, y, dist2, color); + } +} + +// Part of the Bresenham algorithm: mirror the circle octant edge coords to +// obtain a half circle and then mirror them horizontally to obtain the +// horizontal lines that define the circle and its interior. +// Returns the distance^2 and color of the non-black texel that is closest to +// the current texel. +void testOctants(int x, int y, REF(thread,float) dist2, REF(thread,vec4) color) { + testHLine(-x, x, y, dist2, color); + testHLine(-x, x, -y, dist2, color); + testHLine(-y, y, x, dist2, color); + testHLine(-y, y, -x, dist2, color); +} + +// Find the color and distance^2 to the closest texel in colorIn that is not +// black and that is within a circle defined by the specified radius, centered +// in the current texel. (Note: distance^2 is used to avoid sqrt). +// This uses the Bresenham circle algorithm to discover the texels inside that +// circle, to avoid having to read the colorIn texels outside of it. +void testCircle(int r, REF(thread,float) dist2, REF(thread,vec4) color) { + if (r < 0) { r = 0; } + + int d = 3 - (2 * r); + int x = 0; + int y = r; + + testOctants(x, y, dist2, color); + + while (x < y) { + x++; + + if (d > 0) { + y--; + d = d + 4 * (x - y) + 10; + } else { + d = d + 4 * x + 6; + } + + testOctants(x, y, dist2, color); + } +} + +// If radius is 0 then render colorIn as is. If radius > 0 then render only the +// outline of the non-black areas in colorIn. The radius will define the +// thickness of the outline. The color of each texel in the outline will be +// defined by the closest color in colorIn, fading out when the distance to that +// colorIn texel equals the radius. This will allow colorIn to potentially hold +// several colors that will be respected by the outline. + +void main() +{ + hd_FragColor = vec4(0, 0, 0, 1); + // Check if the current texel is not black - meaning that we are inside an + // area of colorId that has been highlighted. + vec4 color = HgiGet_colorIn(uvOut); + bool isInside = color.r > EPSILON || color.g > EPSILON || color.b > EPSILON; + + if (enableOutline==0) { + if (isInside) { + // Inside the highlighted areas in colorIn : render colorIn. + hd_FragColor = color; + } + } else { + + if (!isInside) { + // Outside the highlighted area: render the outline + float r2 = float(radius * radius); + float dist2 = LARGE; + + // Check if there are any highlighted texels around the current one + // inside the specified radius. + testCircle(radius, dist2, color); + + // Check if the distance^2 to the closest highlighted texel is within + // radius^2. + if (dist2 <= r2) { + // Attenuate the texel color using the distance^2 to it. 
+ float factor = (r2 - dist2 ) / r2; + factor *= factor; + + hd_FragColor = color * factor; + hd_FragColor.a = 1; + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPass.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPass.glslfx new file mode 100644 index 0000000..160fa42 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPass.glslfx @@ -0,0 +1,252 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdx/shaders/renderPass.glslfx + +--- -------------------------------------------------------------------------- + +-- layout HdxRenderPass.RenderOitOpaquePixels + +[ + ["out", "vec4", "colorOut"] +] + +-- glsl HdxRenderPass.RenderOitOpaquePixels + +void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + if (color.a >= 1.0) { + colorOut = vec4(color.rgb, 1); + } else { + discard; + } +} + +-- layout HdxRenderPass.WriteOitLayersToBufferCommon + +[ + ["in", "early_fragment_tests"], + ["buffer readWrite", "CounterBuffer", "hdxOitCounterBuffer", + ["atomic_int", "hdxOitCounterBuffer"] + ] +] + +-- glsl HdxRenderPass.WriteOitLayersToBufferCommon + +void RenderOutputImpl(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + #if defined(HD_HAS_hdxOitDataBuffer) + + const int screenWidth = int(HdGet_oitScreenSize().x); + const int screenHeight = int(HdGet_oitScreenSize().y); + // Must match the per-pixel sample count used when creating the OIT buffers. + // (See HdxOitResolveTask::_PrepareOitBuffers) + const int numSamples = 8; + + const int dataBufferSize = screenWidth * screenHeight * numSamples; + const int counterBufferSize = screenWidth * screenHeight + 1; + + // +1 because the counter buffer is cleared with -1, but we want the + // first index to start at 0. + int writeIndex = ATOMIC_ADD(hdxOitCounterBuffer[0], 1) + 1; + + if (writeIndex < dataBufferSize) { + int screenIndex = + int(gl_FragCoord.x) + int(gl_FragCoord.y) * screenWidth; + + if (screenIndex < counterBufferSize) { + int prevIndex = + ATOMIC_EXCHANGE(hdxOitCounterBuffer[screenIndex+1], writeIndex); + hdxOitDataBuffer[writeIndex] = color; + + // Note that we have a choice here to either pick gl_FragCoord.z or + // the depth value from Peye. The former is obtained by applying + // the perspective transform and cannot be changed by a shader. 
+ // + // We pick Peye here so that a shader has an opportunity to change + // the depth of the sample inserted into the OIT list. + // + // This is used by volumes. However, non-volume translucent + // geometry should never modify Peye value and call RenderOutput + // with the Peye value from the vertex shader so that it is + // consistent with gl_FragCoord. This is because such geometry is + // subject to a opaque pixel render pass performing a z-test + // against gl_FragCoord.z. + // + // Note there are implications of using the depth value from Peye + // instead of gl_FragCoord.z here for the subsequent OIT resolve + // shader: the depth sorting order needs to be flipped and the + // OIT resolve shader cannot compare depths in the OIT list + // against the depth buffer unless it takes the perspective + // transform into account. + hdxOitDepthBuffer[writeIndex] = Peye.z / Peye.w; + hdxOitIndexBuffer[writeIndex] = prevIndex; + } + } else { + // We may overrun the counter buffer integer and wrap back to 0 if + // we have a lot of OIT samples. + ATOMIC_ADD(hdxOitCounterBuffer[0], -1); + } + + #endif +} + +-- glsl HdxRenderPass.WriteOitLayersToBufferTranslucent + +void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + // There are two render passes for ordinary OIT geometry. + // Fragments with alpha >= 1.0 are handled in the first (opaque) + // render pass. + if (color.a < 1.0 && color.a > 0.0001) { + RenderOutputImpl(Peye, Neye, color, patchCoord); + } +} + +-- glsl HdxRenderPass.WriteOitLayersToBufferVolume + +void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + // Unlike ordinary OIT geometry, volumes have only one render pass, + // so insert into OIT buffers even if alpha is 1. + if (any(greaterThan(color, vec4(0.0001)))) { + RenderOutputImpl(Peye, Neye, color, patchCoord); + } +} + +-- layout HdxRenderPass.RenderPick + +[ + ["out", "vec4", "primIdOut"], + ["out", "vec4", "instanceIdOut"], + ["out", "vec4", "elementIdOut"], + ["out", "vec4", "edgeIdOut"], + ["out", "vec4", "pointIdOut"], + ["out", "vec4", "neyeOut"] +] + +-- glsl HdxRenderPass.RenderPick + +vec4 IntToVec4(int id) +{ + return vec4(((id >> 0) & 0xff) / 255.0, + ((id >> 8) & 0xff) / 255.0, + ((id >> 16) & 0xff) / 255.0, + ((id >> 24) & 0xff) / 255.0); +} + +// Fwd declare necessary methods to determine the subprim id of a fragment. +FORWARD_DECL(int GetElementID()); // generated via codeGen +FORWARD_DECL(int GetPrimitiveEdgeId()); // defined in edgeId.glslfx, or generated via codeGen +FORWARD_DECL(int GetPointId()); // defined in pointId.glslfx + +void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + int primId = HdGet_primID(); + primIdOut = IntToVec4(primId); + + // instanceIndex is a tuple of integers (num nested levels). + // for picking, we store global instanceId (instanceIndex[0]) in the + // selection framebuffer and then reconstruct the tuple in postprocess. + int instanceId = GetDrawingCoord().instanceIndex[0]; + instanceIdOut = IntToVec4(instanceId); + + elementIdOut = IntToVec4(GetElementID()); + edgeIdOut = IntToVec4(GetPrimitiveEdgeId()); + pointIdOut = IntToVec4(GetPointId()); + + neyeOut = IntToVec4(hd_vec4_2_10_10_10_set(vec4(Neye,0))); +} + +-- layout HdxRenderPass.RenderColorAndSelection + +[ + ["out", "vec4", "colorOut"], + ["out", "float", "selectedOut"] +] + +-- glsl HdxRenderPass.RenderColorAndSelection + +// Note: This mixin expects color and selected color attachments in the bound +// FBO. 
It writes out the computed fragment color and whether it is selected +// (as a float, so, 1.0 or 0.0). + +bool IsSelected(); // defined in selection.glslfx + +void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + colorOut = color; + selectedOut = float(IsSelected()); +} + +-- layout HdxRenderPass.RenderColorWithOccludedSelection + +[ + ["out", "vec4", "colorOut"] +] + +-- glsl HdxRenderPass.RenderColorWithOccludedSelection + +// Note: This mixin expects color and selected color attachments in the bound +// FBO. The alpha component of the computed fragment color is adjusted to blend +// the existing (destination) fragment color if it is selected. + +bool +HasOccludedSelection(vec2 fragcoord) +{ +#ifdef HD_HAS_selectedReadback + const float isSelected = texelFetch(HdGetSampler_selectedReadback(), + ivec2(fragcoord), + 0).x; + return bool(isSelected); +#endif + return false; +} + +float +GetShowThroughOpacity() +{ +#ifdef HD_HAS_occludedSelectionOpacity + // Note: occludedSelectionOpacity flows in as a parameter to the selection + // setup task (HdxSelectionTask) + const float dstOpacity = HdGet_occludedSelectionOpacity(); + // adjust source alpha used to blend source and dst colors. + return 1.0 - dstOpacity; +#else + return 0.5; +#endif +} + +// Input AOV textures are provided by the task. +void RenderOutput(vec4 Peye, vec3 Neye, vec4 color, vec4 patchCoord) +{ + if (HasOccludedSelection(gl_FragCoord.xy)) { + color.a = GetShowThroughOpacity(); + } + + colorOut = color; +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassColorAndSelectionShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassColorAndSelectionShader.glslfx new file mode 100644 index 0000000..0cfb6b0 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassColorAndSelectionShader.glslfx @@ -0,0 +1,75 @@ +-- glslfx version 0.1 + +// +// Copyright 2020 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdx/shaders/renderPassColorAndSelectionShader.glslfx +// Render pass shader configuration that writes color and selection information +// (whether the fragment is selected). 
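Aside: the RenderColorWithOccludedSelection mixin above only rewrites the source alpha to 1.0 - occludedSelectionOpacity; the actual mixing with the selection color already written to the FBO happens in the fixed-function blend stage. A minimal numeric sketch of that math, assuming a conventional SRC_ALPHA / ONE_MINUS_SRC_ALPHA blend state (an assumption for illustration; the real blend state is configured by the Hdx task and is not part of this diff):

    #include <cstdio>

    struct Rgb { float r, g, b; };

    // out = src * srcAlpha + dst * (1 - srcAlpha).
    // With srcAlpha = 1 - dstOpacity, the destination (already-drawn selection
    // color) contributes exactly dstOpacity of the final color.
    static Rgb BlendOver(Rgb src, Rgb dst, float srcAlpha)
    {
        return { src.r * srcAlpha + dst.r * (1.0f - srcAlpha),
                 src.g * srcAlpha + dst.g * (1.0f - srcAlpha),
                 src.b * srcAlpha + dst.b * (1.0f - srcAlpha) };
    }

    int main()
    {
        const float occludedSelectionOpacity = 0.3f;  // hypothetical task parameter value
        const float srcAlpha = 1.0f - occludedSelectionOpacity;

        Rgb geometry  = {0.8f, 0.8f, 0.8f};  // newly shaded occluding fragment
        Rgb selection = {1.0f, 0.5f, 0.0f};  // selection color already in the FBO

        Rgb out = BlendOver(geometry, selection, srcAlpha);
        // Expect 30% show-through of the selection color: (0.86, 0.71, 0.56).
        std::printf("out = (%.2f, %.2f, %.2f)\n", out.r, out.g, out.b);
        return 0;
    }

The numbers are illustrative only; the point is that a larger occludedSelectionOpacity lets more of the occluded selection highlight show through the geometry drawn in front of it.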
+ +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/selection.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "Selection.DecodeUtils", + "Selection.ComputeColor", + "Selection.Fragment", + "RenderPass.ApplyColorOverrides", + "HdxRenderPass.RenderColorAndSelection" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassColorShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassColorShader.glslfx new file mode 100644 index 0000000..ab46885 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassColorShader.glslfx @@ -0,0 +1,72 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassColorShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/selection.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "Selection.DecodeUtils", + "Selection.ComputeColor", + "RenderPass.ApplyColorOverrides", + "RenderPass.RenderColor" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassColorWithOccludedSelectionShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassColorWithOccludedSelectionShader.glslfx new file mode 100644 index 0000000..c3966f1 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassColorWithOccludedSelectionShader.glslfx @@ -0,0 +1,74 @@ +-- glslfx version 0.1 + +// +// Copyright 2020 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdx/shaders/renderPassColorWithOccludedSelectionShader.glslfx +// Render pass shader configuration that writes color and selection information +// (whether the fragment is selected). 
+ +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/selection.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "Selection.DecodeUtils", + "Selection.ComputeColor", + "RenderPass.ApplyColorOverrides", + "HdxRenderPass.RenderColorWithOccludedSelection" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassIdShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassIdShader.glslfx new file mode 100644 index 0000000..b61546a --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassIdShader.glslfx @@ -0,0 +1,70 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassIdShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "RenderPass.NoSelection", + "RenderPass.NoColorOverrides", + "RenderPass.RenderId" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassOitOpaqueShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassOitOpaqueShader.glslfx new file mode 100644 index 0000000..b0eec37 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassOitOpaqueShader.glslfx @@ -0,0 +1,72 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassOitOpaqueShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/selection.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "Selection.DecodeUtils", + "Selection.ComputeColor", + "RenderPass.ApplyColorOverrides", + "HdxRenderPass.RenderOitOpaquePixels" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassOitShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassOitShader.glslfx new file mode 100644 index 0000000..b953175 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassOitShader.glslfx @@ -0,0 +1,73 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassOitShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/selection.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "Selection.DecodeUtils", + "Selection.ComputeColor", + "RenderPass.ApplyColorOverrides", + "HdxRenderPass.WriteOitLayersToBufferCommon", + "HdxRenderPass.WriteOitLayersToBufferTranslucent" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassOitVolumeShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassOitVolumeShader.glslfx new file mode 100644 index 0000000..e7029c0 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassOitVolumeShader.glslfx @@ -0,0 +1,72 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassOitVolumeShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/selection.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "Selection.DecodeUtils", + "Selection.ComputeColor", + "RenderPass.ApplyColorOverrides", + "HdxRenderPass.WriteOitLayersToBufferCommon", + "HdxRenderPass.WriteOitLayersToBufferVolume" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassPickingShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassPickingShader.glslfx new file mode 100644 index 0000000..4874eb3 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassPickingShader.glslfx @@ -0,0 +1,70 @@ +-- glslfx version 0.1 + +// +// Copyright 2019 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassPickingShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "RenderPass.NoSelection", + "RenderPass.NoColorOverrides", + "HdxRenderPass.RenderPick" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/renderPassShadowShader.glslfx b/blender/lib/usd/hdx/resources/shaders/renderPassShadowShader.glslfx new file mode 100644 index 0000000..393e0fe --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/renderPassShadowShader.glslfx @@ -0,0 +1,70 @@ +-- glslfx version 0.1 + +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. 
+--- #import $TOOLS/hdx/shaders/renderPassShadowShader.glslfx + +#import $TOOLS/hdSt/shaders/renderPass.glslfx +#import $TOOLS/hdx/shaders/renderPass.glslfx + +-- configuration +{ + "techniques": { + "default": { + "vertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "postTessVertexShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessControlShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "tessEvalShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "geometryShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.ApplyClipPlanes" ] + }, + "fragmentShader" : { + "source": [ "RenderPass.Camera", + "RenderPass.CameraFS", + "RenderPass.NoSelection", + "RenderPass.NoColorOverrides", + "RenderPass.RenderColor" ] + } + } + } +} diff --git a/blender/lib/usd/hdx/resources/shaders/selection.glslfx b/blender/lib/usd/hdx/resources/shaders/selection.glslfx new file mode 100644 index 0000000..a0d80d8 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/selection.glslfx @@ -0,0 +1,544 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +--- This is what an import might look like. +--- #import $TOOLS/hdx/shaders/selection.glslfx + +--- -------------------------------------------------------------------------- +-- glsl Selection.DecodeUtils +/// Decoding utilities for selection highlighting. +/// This mixin may be safely included by any shader stage. Its lone dependency +/// on code generation is the hdxSelectionBuffer SSBO that holds the encoded +/// selection state. + +// This should match the HdSelection enum. 
+int GetMaxNumSelectionHighlightModes() +{ + return 2; +} + +#if defined(HD_HAS_hdxSelectionBuffer) +// helper methods to decode hdxSelectionBuffer +// ------ selection highlight mode specifics ------- +bool HighlightModeHasSelection(int offset) +{ + return (offset != 0); +} + +int GetIndexOffsetForHighlightMode(int mode) +{ + return HdGet_hdxSelectionBuffer(mode + 1/*[0] holds #modes*/); +} + +int GetNumSelectionHighlightModes() +{ + return HdGet_hdxSelectionBuffer(0); +} + +// ------ selection offset specifics ------- +// Note: This should match the selection offset encoding in +// hdx/SelectionTracker.cpp +void DecodeSelOffset(int selOffset, REF(thread, bool) isSelected, + REF(thread, int) nextOffset) +{ + isSelected = bool(selOffset & 0x1); // bit 0 + nextOffset = selOffset >> 1; // bits 31:1 +} + +// --------- subprim decoding ---------- +// Each subprims offsets' buffer encoding is: +// [subprim-type][min][max][ selOffsets ] +// <----------3 ----------><--- max - min + 1 --> +struct SubprimHeader { + int type; + int min; + int max; +}; +SubprimHeader DecodeSubprimHeader(int offset) +{ + SubprimHeader header; + header.type = HdGet_hdxSelectionBuffer(offset ); + header.min = HdGet_hdxSelectionBuffer(offset + 1); + header.max = HdGet_hdxSelectionBuffer(offset + 2); + return header; +} + +bool IsSubprimSelected(int id, int min, int max, int headerStart, + REF(thread, int) nextSubprimOffset) +{ + const int SUBPRIM_SELOFFSETS_HEADER_SIZE = 3; + + nextSubprimOffset = 0; // initialize + bool isSelected = false; + + if (id >= min && id < max) { + int netSubprimIndex = + headerStart + SUBPRIM_SELOFFSETS_HEADER_SIZE + id - min; + int selOffset = HdGet_hdxSelectionBuffer(netSubprimIndex); + + DecodeSelOffset(selOffset, /*out*/isSelected, /*out*/nextSubprimOffset); + } else { + // The subprim id does not fall in the selected id range, so the subprim + // in question isn't selected. However, we can still have other + // subprim(s) selected. To get the offset to jump to, we mimic decoding + // the "min" subprim id. + int minSubprimIndex = headerStart + SUBPRIM_SELOFFSETS_HEADER_SIZE; + DecodeSelOffset(HdGet_hdxSelectionBuffer(minSubprimIndex), + /*out*/isSelected, /*out*/nextSubprimOffset); + isSelected = false; + } + + return isSelected; +} +#endif // HD_HAS_hdxSelectionBuffer + +// Keep the result struct definition outside guards. +struct SubprimSelectionResult { + bool elementSelected; + bool edgeSelected; + bool pointSelected; + int pointColorId; +}; + +SubprimSelectionResult InitSubprimSelectionResult( + bool _elementSelected, + bool _edgeSelected, + bool _pointSelected, + int _pointColorId) +{ + SubprimSelectionResult res; + + res.elementSelected = _elementSelected; + res.edgeSelected = _edgeSelected; + res.pointSelected = _pointSelected; + res.pointColorId = _pointColorId; + + return res; +} + +bool IsAnySubprimSelected(SubprimSelectionResult s) +{ + return (s.elementSelected || s.edgeSelected || s.pointSelected); +} +bool HasCustomizedPointColor(SubprimSelectionResult s) +{ + // Use -1 to encode selected points that don't have a custom point color. 
+ return (s.pointColorId != -1); +} + +#if defined(HD_HAS_hdxSelectionBuffer) +// Note: These should match the SubprimType enum in hdx/SelectionTracker.cpp +#define SUBPRIM_TYPE_ELEMENT 0 +#define SUBPRIM_TYPE_EDGE 1 +#define SUBPRIM_TYPE_POINT 2 +#define SUBPRIM_TYPE_INSTANCE 3 + +SubprimSelectionResult GetSubprimSel(int offset, + int elementId, int edgeId, int pointId) +{ + SubprimSelectionResult s = InitSubprimSelectionResult(false, false, false, 0); + + int nextSubprimOffset = 0; + SubprimHeader header = DecodeSubprimHeader(offset); + + if (header.type == SUBPRIM_TYPE_ELEMENT) { + s.elementSelected = + IsSubprimSelected(elementId, header.min, header.max, + offset, /*out*/nextSubprimOffset); + + if (nextSubprimOffset != 0) { + // fragment has additional subprim(s) selected. update header. + header = DecodeSubprimHeader(nextSubprimOffset); + offset = nextSubprimOffset; + } + } + + if (header.type == SUBPRIM_TYPE_EDGE) { + s.edgeSelected = + IsSubprimSelected(edgeId, header.min, header.max, + offset, /*out*/nextSubprimOffset); + + if (nextSubprimOffset != 0) { + // fragment has points selected. update header. + header = DecodeSubprimHeader(nextSubprimOffset); + offset = nextSubprimOffset; + } + } + + if (header.type == SUBPRIM_TYPE_POINT) { + s.pointSelected = + IsSubprimSelected(pointId, header.min, header.max, + offset, /*unused*/nextSubprimOffset); + // For points alone, since there isn't any subprim to follow it, the + // offset field is overriden to represent the index into the + // selectedPointColors buffer to support customized coloring of a set of + // selected points. + s.pointColorId = nextSubprimOffset; + } + + return s; +} + +// --------- instance decoding ---------- +bool IsInstanceSelected(int offset, REF(thread, int) nextOffset) +{ + // If we don't find an instance subprim block, pass the same offset to + // GetSubprimSel. + nextOffset = offset; + bool sel = false; + + int instanceId = GetDrawingCoord().instanceIndex[0]; + + SubprimHeader header = DecodeSubprimHeader(offset); + if (header.type == SUBPRIM_TYPE_INSTANCE) { + sel = + IsSubprimSelected(instanceId, header.min, header.max, + offset, /*out*/nextOffset); + } + + return sel; +} +#endif // HD_HAS_hdxSelectionBuffer + +// --------- selection buffer decoding entry point ---------- +struct SelectionResult { + bool primOrInstanceSel; + SubprimSelectionResult subprimSel; +}; + +SelectionResult InitSelectionResult( + bool _primOrInstanceSel, + bool _elementSelected, + bool _edgeSelected, + bool _pointSelected, + int _pointColorId) +{ + SelectionResult res; + + res.primOrInstanceSel = _primOrInstanceSel; + res.subprimSel = InitSubprimSelectionResult( + _elementSelected, _edgeSelected, + _pointSelected, _pointColorId); + + return res; +} + + +// Decodes the selection buffer encoding scheme for a given mode, and returns +// the selection result. +SelectionResult GetSelectionResult(int mode, + int elementId, int edgeId, int pointId) +{ + SelectionResult res = InitSelectionResult(false, false, false, false, 0); + +#if defined(HD_HAS_hdxSelectionBuffer) + // The hdxSelectionBuffer layout is: + // [#modes] [per-mode offset] [data mode0] ... [data modeM] + // [---------header---------] + // Each mode's data is laid out as: + // [ prims | points | edges | elements | instance level-N | ... | level 0 ] + // <-------- subprims -------> <----------- instances ---------> + // <---------------------- per prim ----------------------------> + // See hdx/SelectionTracker.cpp for details on the encoding scheme. 
+ int modeOffset = GetIndexOffsetForHighlightMode(mode); + if (!HighlightModeHasSelection(modeOffset)) { + // highlight mode has no selected objects (prims/instances/elements) + return res; + } + + const int PRIM_SELOFFSETS_HEADER_SIZE = 2; + const int primId = HdGet_primID(); + int smin = HdGet_hdxSelectionBuffer(modeOffset); + int smax = HdGet_hdxSelectionBuffer(modeOffset + 1); + + if (primId >= smin && primId < smax) { + int offset = modeOffset + PRIM_SELOFFSETS_HEADER_SIZE + primId - smin; + int nextOffset = 0; + bool sel = false; + DecodeSelOffset(HdGet_hdxSelectionBuffer(offset), + /*out*/sel, /*out*/nextOffset); + + // At this point, sel indicates whether the fragment corresponds to + // a prim that needs to be fully highlighted, while a non-zero + // nextOffset indicates whether additional decoding may be done. + + // We don't currently differentiate between prim, instance and + // subprim selection highlighting (i.e., visually, they look the + // same), and thus can skip additional decoding if sel is true. + // We choose not to, for ease of future customization. + if (nextOffset != 0) { + // check if instance (or) subprim(s) are selected + offset = nextOffset; + sel = sel || IsInstanceSelected(offset, /*out*/nextOffset); + + if (nextOffset != 0) { + res.subprimSel = + GetSubprimSel(nextOffset, elementId, edgeId, pointId); + } + } + res.primOrInstanceSel = sel; + } +#endif // HD_HAS_hdxSelectionBuffer + + return res; +} + +// Returns the logical OR of the inputs +SelectionResult +_CombineWithOr(SelectionResult a, SelectionResult b) +{ + SelectionResult res = InitSelectionResult(false, false, false, false, 0); + + res.primOrInstanceSel = a.primOrInstanceSel || b.primOrInstanceSel; + res.subprimSel.elementSelected = a.subprimSel.elementSelected || + b.subprimSel.elementSelected; + res.subprimSel.edgeSelected = a.subprimSel.edgeSelected || + b.subprimSel.edgeSelected; + res.subprimSel.pointSelected = a.subprimSel.pointSelected || + b.subprimSel.pointSelected; + // pointColorIndex is ignored. + return res; +} + +// Returns the net selection result aggregating the result of each selection +// mode. +SelectionResult GetNetSelectionResult(int elementId, int edgeId, int pointId) +{ + SelectionResult netResult = InitSelectionResult(false, false, false, false, 0); + +#if defined(HD_HAS_hdxSelectionBuffer) + const int numSelectionModes = GetNumSelectionHighlightModes(); + for (int mode = 0; mode < numSelectionModes; mode++) { + SelectionResult modeResult = + GetSelectionResult(mode, elementId, edgeId, pointId); + netResult = _CombineWithOr(modeResult, netResult); + } +#endif + return netResult; +} + +--- -------------------------------------------------------------------------- +-- glsl Selection.Vertex.PointSel +// Mixin for use in the vertex shader stage. +// Decodes the selection buffer to find out if the current vertex (point) is +// selected. This is called from hdSt/shaders/pointId.glslfx +bool IsPointSelected(int pointId) +{ + bool sel = false; + #if defined(HD_HAS_hdxSelectionBuffer) + const int numSelectionModes = GetNumSelectionHighlightModes(); + for (int mode = 0; mode < numSelectionModes; mode++) { + // At the VS stage, we don't carry enough state to determine the + // elementId and edgeId. So, use fallback values instead. 
+ SelectionResult res = + GetSelectionResult(mode, /*elementId*/-1, /*edgeId*/-1, pointId); + if (res.subprimSel.pointSelected && + !HasCustomizedPointColor(res.subprimSel)) { + sel = true; + break; + } + } // for each highlight mode + #endif + return sel; +} + +--- -------------------------------------------------------------------------- +-- glsl Selection.Geometry.ElementSel +// Mixin for use in the geometry shader stage. +// Helper functions to determine if the element (face) is selected for either +// a given selection mode, or any of the modes. + +FORWARD_DECL(int GetElementID()); // code gen + +bool IsElementSelected(int mode) +{ + bool sel = false; + #if defined(HD_HAS_hdxSelectionBuffer) + const int numSelectionModes = GetNumSelectionHighlightModes(); + if (mode < numSelectionModes) { + SelectionResult res = + GetSelectionResult(mode, GetElementID(), + /*edgeId*/-1, /*pointId*/-1); + if (res.subprimSel.elementSelected) { + sel = true; + } + } + #endif + return sel; +} + +bool IsSelected() +{ + // Edge and point selection aren't relevant at the GS stage. + SelectionResult res = GetNetSelectionResult( + GetElementID(), /*edgeId*/-1, /*pointId*/-1); + + return res.primOrInstanceSel || res.subprimSel.elementSelected; +} + +--- -------------------------------------------------------------------------- +-- glsl Selection.Geometry.WireSelOffset +// Mixin for use in the geometry shader stage for wireframe rendering. +// See comment below. + +FORWARD_DECL(int GetMaxNumSelectionHighlightModes()); +FORWARD_DECL(bool IsElementSelected(int mode)); +FORWARD_DECL(int GetElementID()); // code gen + +vec4 ComputeSelectionOffset() +{ + // For wireframe, we only render the edges of faces. Because we don't have + // a way to control which face gets rasterized first, if face A is selected + // and face B is unselected, it's possible for face B to draw over face A + // and for the highlight to be dropped. + + // To compensate for this, since we're running the geometry shader anyway, + // add a small polygon offset for selected faces, to push them in front of + // unselected faces. + + // For doubly-selected faces (e.g. rollover & selection), apply a double + // offset. The code below chooses a unique offset for each combination + // of selection modes by constructing a bitmask of selection modes for + // this face, and reinterpreting it as an integer. + + int offset = 0; + + int numModes = GetMaxNumSelectionHighlightModes(); + for (int mode = 0; mode < numModes; ++mode) { + if (IsElementSelected(mode)) { + offset += (1 << mode); + } + } + + // Note: for our base epsilon, we're choosing this somewhat arbitrarily. + // This is an eye space epsilon, so it's not dependent on distance-from- + // camera. Rather, it's dependent on scene scale. 1e-3 works well for + // our scenes, but if this causes trouble we could use an epsilon based on + // the eye-space distance of a z-buffer bit flip, which would match the + // behavior of glPolygonOffset better, at the cost of more math and some + // extra shader uniforms. + float eps = 1e-3; + + return vec4(0, 0, offset * eps, 0); +} + +--- -------------------------------------------------------------------------- +-- glsl Selection.Geometry.WireSelNoOffset + +vec4 ComputeSelectionOffset() +{ + return vec4(0); +} + +--- -------------------------------------------------------------------------- +-- glsl Selection.Fragment +// Mixin for use in the fragment shader stage to determine if the fragment is +// selected. 
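Aside: the per-prim and per-subprim entries unpacked above by DecodeSelOffset fold a selected flag and a jump offset into one int (bit 0 = isSelected, bits 31:1 = offset of the next instance/subprim block, with 0 meaning no further data). A minimal CPU-side round-trip sketch of that bit layout, using a hypothetical encoder that simply mirrors the decode (the authoritative encoder lives in hdx/SelectionTracker.cpp, which is not part of this diff):

    #include <cassert>
    #include <cstdio>

    // Hypothetical encoder mirroring the GLSL DecodeSelOffset layout:
    //   bit 0     -> isSelected
    //   bits 31:1 -> offset of the next block, 0 = none
    static int EncodeSelOffset(bool isSelected, int nextOffset)
    {
        return (nextOffset << 1) | (isSelected ? 1 : 0);
    }

    static void DecodeSelOffset(int selOffset, bool *isSelected, int *nextOffset)
    {
        *isSelected = (selOffset & 0x1) != 0;  // bit 0
        *nextOffset = selOffset >> 1;          // bits 31:1
    }

    int main()
    {
        // A prim that is fully selected and whose subprim block starts at offset 42.
        int packed = EncodeSelOffset(true, 42);   // packed == 85

        bool sel = false;
        int next = 0;
        DecodeSelOffset(packed, &sel, &next);

        assert(sel && next == 42);
        std::printf("packed=%d selected=%d nextOffset=%d\n", packed, (int)sel, next);
        return 0;
    }

This is only meant to make the bit layout concrete; offsets in the real buffer are indices into hdxSelectionBuffer itself, as described in the layout comment above.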
+ +FORWARD_DECL(int GetElementID()); // generated via codeGen +FORWARD_DECL(int GetPrimitiveEdgeId()); // defined in edgeId.glslfx, or generated via codeGen +FORWARD_DECL(int GetPointId()); // defined in pointId.glslfx + +bool IsSelected() +{ + SelectionResult res = GetNetSelectionResult( + GetElementID(), GetPrimitiveEdgeId(), GetPointId()); + + return res.primOrInstanceSel || res.subprimSel.elementSelected; +} + +--- -------------------------------------------------------------------------- +-- glsl Selection.ComputeColor + +#if defined(HD_HAS_hdxSelectionBuffer) +vec4 GetSelectionColor(int mode) +{ + vec4 s = vec4(0,0,0,0); + // XXX: Make selection colors an array so we can avoid the branching. + if (mode == 0) + s = HdGet_selColor(); + else if (mode == 1) + s = HdGet_selLocateColor(); + + return s; +} + +// fwd decl fn defined in edgeId.glslfx or generated via codeGen +FORWARD_DECL(float GetSelectedEdgeOpacity()); + +vec4 GetSubprimSelectionColor(int mode, SubprimSelectionResult res) +{ + vec4 s = GetSelectionColor(mode); + if (res.edgeSelected) { + s.a = GetSelectedEdgeOpacity(); + } + if (res.pointSelected && HasCustomizedPointColor(res)) { + #if defined(HD_HAS_selectionPointColors) + s = HdGet_selectionPointColors(res.pointColorId); + #endif + } + + return s; +} +#endif + +// Fwd declare necessary methods to determine the subprim id of a fragment. +FORWARD_DECL(int GetElementID()); // generated via codeGen +FORWARD_DECL(int GetPrimitiveEdgeId()); // defined in edgeId.glslfx, or generated via codeGen +FORWARD_DECL(int GetPointId()); // defined in pointId.glslfx + +// Decodes the selection buffer to find out if the current fragment is from +// a prim/instance/subprim that is selected, applies selection highlighting to +// the incoming color, and returns the resulting color. +vec4 ApplySelectionColor(vec4 color) +{ +#if defined(HD_HAS_hdxSelectionBuffer) + int elementId = GetElementID(); + int edgeId = GetPrimitiveEdgeId(); + int pointId = GetPointId(); + + const int numSelectionModes = GetNumSelectionHighlightModes(); + + bool isSelected = false; + for (int mode = 0; mode < numSelectionModes; mode++) { + SelectionResult res = + GetSelectionResult(mode, elementId, edgeId, pointId); + + if (res.primOrInstanceSel) { + isSelected = true; + vec4 s = GetSelectionColor(mode); + color.rgb = mix(color.rgb, s.rgb, s.a); + } + if (IsAnySubprimSelected(res.subprimSel)) { + isSelected = true; + vec4 ss = GetSubprimSelectionColor(mode, res.subprimSel); + color.rgb = mix(color.rgb, ss.rgb, ss.a); + } + } // for each highlight mode + +#endif + return color; +} diff --git a/blender/lib/usd/hdx/resources/shaders/skydome.glslfx b/blender/lib/usd/hdx/resources/shaders/skydome.glslfx new file mode 100644 index 0000000..5f460f9 --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/skydome.glslfx @@ -0,0 +1,84 @@ +-- glslfx version 0.1 + +// +// Copyright 2021 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. 
+// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + "SkydomeFragment": { + "source": [ "Skydome.Fragment" ] + } + } + } +} + +-- glsl Skydome.Fragment + +const float PI = 3.1415926535898; +const float farPlane = 1.0; + +float wrapSampleValue(float value) +{ + if (value < 0.0) { + value += 1.0; + } + else if (value > 1.0) { + value -= 1.0; + } + return value; +} + +// from PreviewSurface.glslfx +vec2 projectToLatLong(vec3 sample3D) +{ + // project spherical coord onto latitude-longitude map with + // latitude: +y == pi/2 and longitude: +z == 0, +x == pi/2 + float x = (atan(sample3D.z, sample3D.x) + 0.5 * PI) / (2.0 * PI); + float y = acos(sample3D.y) / PI; + + return vec2(wrapSampleValue(x), wrapSampleValue(y)); +} + + +void main(void) +{ + // Transform the UV coordinates into NDC space and place at the far plane + // (z = 1) before transforming into view space. + vec2 uvOut_ndc = (uvOut * vec2(2.0)) - vec2(1.0); + vec4 uvOut_view = invProjMatrix * vec4(uvOut_ndc, farPlane, 1.0); + + // Normalize to use as the initial sampleDirection + vec3 sampleDirection = normalize(uvOut_view.xyz); + + // Apply the camera rotation and lightTransform to the sampleDirection + sampleDirection = + ( lightTransform * viewToWorld * vec4(sampleDirection, 0.0) ).xyz; + + // Sample Skydome Texture with the sampleDirection + vec2 sampleCoord = projectToLatLong(sampleDirection); + hd_FragColor = vec4(HgiGet_skydomeTexture(sampleCoord).xyz, 1.0); + gl_FragDepth = farPlane; +} diff --git a/blender/lib/usd/hdx/resources/shaders/visualize.glslfx b/blender/lib/usd/hdx/resources/shaders/visualize.glslfx new file mode 100644 index 0000000..7b3c62e --- /dev/null +++ b/blender/lib/usd/hdx/resources/shaders/visualize.glslfx @@ -0,0 +1,124 @@ +-- glslfx version 0.1 + +// +// Copyright 2021 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// + +-- configuration +{ + "techniques": { + "default": { + "VisualizeVertex": { + "source": [ "Visualize.Vertex" ] + }, + "VisualizeFragmentDepth": { + "source": [ "Visualize.Fragment.Depth" ] + }, + "VisualizeFragmentId": { + "source": [ "Visualize.Fragment.Id" ] + }, + "VisualizeFragmentNormal": { + "source": [ "Visualize.Fragment.Normal" ] + }, + "VisualizeFragmentFallback": { + "source": [ "Visualize.Fragment.Fallback" ] + } + } + } +} + +-- glsl Visualize.Vertex + +void main(void) +{ + gl_Position = position; + uvOut = uvIn; +} + +-- glsl Visualize.Fragment.Depth + +// Re-normalize clip space depth in the range [0.0, 1.0] to the range [min, max] +// to allow better visualization of depth differences. +float normalizeDepth(float depth) +{ + float min = minMaxDepth.x, max = minMaxDepth.y; + return (depth - min) / (max - min); +} + +// Display the renormalized depth as a grayscale image. +void main(void) +{ + vec2 fragCoord = uvOut * screenSize; + float depth = HgiTexelFetch_depthIn(ivec2(fragCoord)).x; + hd_FragColor = vec4( vec3(normalizeDepth(depth)), 1.0 ); +} + +-- glsl Visualize.Fragment.Id + +// Convert a 32 bit integer into a vec3 color. +vec3 IntToVec3(int id) +{ + // Create a 24 bit value by XORing the leading 8 bits with the remaining + // 24 bits. + int leadBits = id >> 24; + int restBits = (id << 8) >> 8; + int result = restBits ^ leadBits; + + return vec3(((result >> 0) & 0xff) / 255.0, + ((result >> 8) & 0xff) / 255.0, + ((result >> 16) & 0xff) / 255.0); +} + +// Convert a 32 bit integer representing the ID of a primitive or sub-primitive +// into a color such that consecutive IDs generally map to different colors. +void main(void) +{ + vec2 fragCoord = uvOut * screenSize; + int id = int(HgiTexelFetch_idIn(ivec2(fragCoord)).x); + int vizId = id * 11629091; // prime number near ln(2) * 2^24 + hd_FragColor = vec4(IntToVec3(vizId), 1.0); +} + +-- glsl Visualize.Fragment.Normal + +// [-1,1] to [0,1] +vec3 renormalize(vec3 normal) +{ + return 0.5 * normal + 0.5; +} + +void main(void) +{ + vec2 fragCoord = uvOut * screenSize; + vec3 normal = HgiTexelFetch_normalIn(ivec2(fragCoord)).xyz; + hd_FragColor = vec4(renormalize(normal), 1.0); +} + +-- glsl Visualize.Fragment.Fallback + +void main(void) +{ + vec2 fragCoord = uvOut * screenSize; + // Force conversion to a vector of floats. 
+ hd_FragColor = vec4(HgiTexelFetch_aovIn(ivec2(fragCoord))); +} diff --git a/blender/lib/usd/hdx/resources/textures/StinsonBeach.hdr b/blender/lib/usd/hdx/resources/textures/StinsonBeach.hdr new file mode 100644 index 0000000..a7d1590 Binary files /dev/null and b/blender/lib/usd/hdx/resources/textures/StinsonBeach.hdr differ diff --git a/blender/lib/usd/hdx/resources/textures/StinsonBeach.tex b/blender/lib/usd/hdx/resources/textures/StinsonBeach.tex new file mode 100644 index 0000000..9a2d511 Binary files /dev/null and b/blender/lib/usd/hdx/resources/textures/StinsonBeach.tex differ diff --git a/blender/lib/usd/hgiGL/resources/plugInfo.json b/blender/lib/usd/hgiGL/resources/plugInfo.json new file mode 100644 index 0000000..cd0d910 --- /dev/null +++ b/blender/lib/usd/hgiGL/resources/plugInfo.json @@ -0,0 +1,18 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "HgiGL" : { + "bases": ["Hgi"] + } + } + }, + "LibraryPath": "", + "Name": "hgiGL", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/hio/resources/plugInfo.json b/blender/lib/usd/hio/resources/plugInfo.json new file mode 100644 index 0000000..f8e9825 --- /dev/null +++ b/blender/lib/usd/hio/resources/plugInfo.json @@ -0,0 +1,20 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "Hio_StbImage" : { + "bases": ["HioImage"], + "imageTypes": ["bmp", "jpg", "jpeg", "png", "tga", "hdr"], + "precedence": 2 + } + } + }, + "LibraryPath": "", + "Name": "hio", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/hioOpenVDB/resources/plugInfo.json b/blender/lib/usd/hioOpenVDB/resources/plugInfo.json new file mode 100644 index 0000000..7a968f0 --- /dev/null +++ b/blender/lib/usd/hioOpenVDB/resources/plugInfo.json @@ -0,0 +1,20 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "HioOpenVDB_TextureData" : { + "bases": ["HioFieldTextureData"], + "fieldDataTypes": ["vdb"], + "precedence": 0 + } + } + }, + "LibraryPath": "", + "Name": "hioOpenVDB", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/ndr/resources/plugInfo.json b/blender/lib/usd/ndr/resources/plugInfo.json new file mode 100644 index 0000000..bcbf241 --- /dev/null +++ b/blender/lib/usd/ndr/resources/plugInfo.json @@ -0,0 +1,21 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "NdrDiscoveryPlugin": {}, + "_NdrFilesystemDiscoveryPlugin" : { + "bases": ["NdrDiscoveryPlugin"], + "displayName": "Filesystem Discovery" + }, + "NdrParserPlugin": {} + } + }, + "LibraryPath": "", + "Name": "ndr", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/plugInfo.json b/blender/lib/usd/plugInfo.json new file mode 100644 index 0000000..1276914 --- /dev/null +++ b/blender/lib/usd/plugInfo.json @@ -0,0 +1,5 @@ +{ + "Includes": [ + "*/resources/" + ] +} diff --git a/blender/lib/usd/sdf/resources/plugInfo.json b/blender/lib/usd/sdf/resources/plugInfo.json new file mode 100644 index 0000000..68fc2ae --- /dev/null +++ b/blender/lib/usd/sdf/resources/plugInfo.json @@ -0,0 +1,36 @@ +{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "payloadAssetDependencies": { + "appliesTo": "prims", + "displayGroup": "Pipeline", + "type": "asset[]" + } + }, + "Types": { + "SdfFileFormat": { + "displayName": "Sdf file format base class", + "target": "sdf" + }, + "SdfTextFileFormat": { + "bases": [ + "SdfFileFormat" + ], + "displayName": "Sdf Text File Format", + "extensions": [ + "sdf" + ], + 
"formatId": "sdf" + } + } + }, + "LibraryPath": "", + "Name": "sdf", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usd/resources/codegenTemplates/api.h b/blender/lib/usd/usd/resources/codegenTemplates/api.h new file mode 100644 index 0000000..eb2adac --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/api.h @@ -0,0 +1,47 @@ +// +// Copyright 2017 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// +#ifndef {{ Upper(libraryName) }}_API_H +#define {{ Upper(libraryName) }}_API_H + +#include "pxr/base/arch/export.h" + +#if defined(PXR_STATIC) +# define {{ Upper(libraryName) }}_API +# define {{ Upper(libraryName) }}_API_TEMPLATE_CLASS(...) +# define {{ Upper(libraryName) }}_API_TEMPLATE_STRUCT(...) +# define {{ Upper(libraryName) }}_LOCAL +#else +# if defined({{ Upper(libraryName) }}_EXPORTS) +# define {{ Upper(libraryName) }}_API ARCH_EXPORT +# define {{ Upper(libraryName) }}_API_TEMPLATE_CLASS(...) ARCH_EXPORT_TEMPLATE(class, __VA_ARGS__) +# define {{ Upper(libraryName) }}_API_TEMPLATE_STRUCT(...) ARCH_EXPORT_TEMPLATE(struct, __VA_ARGS__) +# else +# define {{ Upper(libraryName) }}_API ARCH_IMPORT +# define {{ Upper(libraryName) }}_API_TEMPLATE_CLASS(...) ARCH_IMPORT_TEMPLATE(class, __VA_ARGS__) +# define {{ Upper(libraryName) }}_API_TEMPLATE_STRUCT(...) 
ARCH_IMPORT_TEMPLATE(struct, __VA_ARGS__) +# endif +# define {{ Upper(libraryName) }}_LOCAL ARCH_HIDDEN +#endif + +#endif diff --git a/blender/lib/usd/usd/resources/codegenTemplates/plugInfo.json b/blender/lib/usd/usd/resources/codegenTemplates/plugInfo.json new file mode 100644 index 0000000..a621878 --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/plugInfo.json @@ -0,0 +1,15 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + } + }, + "LibraryPath": "@PLUG_INFO_LIBRARY_PATH@", + "Name": "{{ libraryName }}", + "ResourcePath": "@PLUG_INFO_RESOURCE_PATH@", + "Root": "@PLUG_INFO_ROOT@", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usd/resources/codegenTemplates/schemaClass.cpp b/blender/lib/usd/usd/resources/codegenTemplates/schemaClass.cpp new file mode 100644 index 0000000..487484a --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/schemaClass.cpp @@ -0,0 +1,411 @@ +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// +#include "{{ libraryPath }}/{{ cls.GetHeaderFile() }}" +#include "pxr/usd/usd/schemaRegistry.h" +#include "pxr/usd/usd/typed.h" + +#include "pxr/usd/sdf/types.h" +#include "pxr/usd/sdf/assetPath.h" + +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} +#include "pxr/base/tf/staticTokens.h" + +{% endif %} +{% if useExportAPI %} +{{ namespaceOpen }} + +{% endif %} +// Register the schema with the TfType system. +TF_REGISTRY_FUNCTION(TfType) +{ + TfType::Define<{{ cls.cppClassName }}, + TfType::Bases< {{ cls.parentCppClassName }} > >(); + +{% if cls.isConcrete %} + // Register the usd prim typename as an alias under UsdSchemaBase. This + // enables one to call + // TfType::Find().FindDerivedByName("{{ cls.usdPrimTypeName }}") + // to find TfType<{{ cls.cppClassName }}>, which is how IsA queries are + // answered. 
+ TfType::AddAlias("{{ cls.usdPrimTypeName }}"); +{% endif %} +} + +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} +TF_DEFINE_PRIVATE_TOKENS( + _schemaTokens, + ({{ cls.propertyNamespacePrefix }}) +); + +{% endif %} +/* virtual */ +{{ cls.cppClassName }}::~{{ cls.cppClassName }}() +{ +} + +{% if not cls.isAPISchemaBase %} +/* static */ +{{ cls.cppClassName }} +{{ cls.cppClassName }}::Get(const UsdStagePtr &stage, const SdfPath &path) +{ + if (!stage) { + TF_CODING_ERROR("Invalid stage"); + return {{ cls.cppClassName }}(); + } +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + TfToken name; + if (!Is{{ cls.usdPrimTypeName }}Path(path, &name)) { + TF_CODING_ERROR("Invalid {{ cls.propertyNamespacePrefix }} path <%s>.", path.GetText()); + return {{ cls.cppClassName }}(); + } + return {{ cls.cppClassName }}(stage->GetPrimAtPath(path.GetPrimPath()), name); +{% else %} + return {{ cls.cppClassName }}(stage->GetPrimAtPath(path)); +{% endif %} +} + +{% if cls.isMultipleApply %} +{{ cls.cppClassName }} +{{ cls.cppClassName }}::Get(const UsdPrim &prim, const TfToken &name) +{ + return {{ cls.cppClassName }}(prim, name); +} + +/* static */ +std::vector<{{ cls.cppClassName }}> +{{ cls.cppClassName }}::GetAll(const UsdPrim &prim) +{ + std::vector<{{ cls.cppClassName }}> schemas; + + for (const auto &schemaName : + UsdAPISchemaBase::_GetMultipleApplyInstanceNames(prim, _GetStaticTfType())) { + schemas.emplace_back(prim, schemaName); + } + + return schemas; +} + +{% endif %} +{% endif %} +{% if cls.isConcrete %} +/* static */ +{{ cls.cppClassName }} +{{ cls.cppClassName }}::Define( + const UsdStagePtr &stage, const SdfPath &path) +{ + static TfToken usdPrimTypeName("{{ cls.usdPrimTypeName }}"); + if (!stage) { + TF_CODING_ERROR("Invalid stage"); + return {{ cls.cppClassName }}(); + } + return {{ cls.cppClassName }}( + stage->DefinePrim(path, usdPrimTypeName)); +} +{% endif %} +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + +/* static */ +bool +{{ cls.cppClassName }}::IsSchemaPropertyBaseName(const TfToken &baseName) +{ + static TfTokenVector attrsAndRels = { +{% for attrName in cls.attrOrder %} +{% set attr = cls.attrs[attrName] %} + UsdSchemaRegistry::GetMultipleApplyNameTemplateBaseName( + {{ tokensPrefix }}Tokens->{{ attr.name }}), +{% endfor %} +{% for relName in cls.relOrder %} +{% set rel = cls.rels[relName] %} + UsdSchemaRegistry::GetMultipleApplyNameTemplateBaseName( + {{ tokensPrefix }}Tokens->{{ rel.name }}), +{% endfor %} + }; + + return find(attrsAndRels.begin(), attrsAndRels.end(), baseName) + != attrsAndRels.end(); +} + +/* static */ +bool +{{ cls.cppClassName }}::Is{{ cls.usdPrimTypeName }}Path( + const SdfPath &path, TfToken *name) +{ + if (!path.IsPropertyPath()) { + return false; + } + + std::string propertyName = path.GetName(); + TfTokenVector tokens = SdfPath::TokenizeIdentifierAsTokens(propertyName); + + // The baseName of the {{ cls.usdPrimTypename }} path can't be one of the + // schema properties. We should validate this in the creation (or apply) + // API. 
+ TfToken baseName = *tokens.rbegin(); + if (IsSchemaPropertyBaseName(baseName)) { + return false; + } + + if (tokens.size() >= 2 + && tokens[0] == _schemaTokens->{{ cls.propertyNamespacePrefix }}) { + *name = TfToken(propertyName.substr( + _schemaTokens->{{ cls.propertyNamespacePrefix }}.GetString().size() + 1)); + return true; + } + + return false; +} +{% endif %} + +/* virtual */ +UsdSchemaKind {{ cls.cppClassName }}::_GetSchemaKind() const +{ + return {{ cls.cppClassName }}::schemaKind; +} +{% if cls.isAppliedAPISchema %} + +/* static */ +bool +{% if not cls.isMultipleApply %} +{{ cls.cppClassName }}::CanApply( + const UsdPrim &prim, std::string *whyNot) +{% else %} +{{ cls.cppClassName }}::CanApply( + const UsdPrim &prim, const TfToken &name, std::string *whyNot) +{% endif %} +{ +{% if cls.isMultipleApply %} + return prim.CanApplyAPI<{{ cls.cppClassName }}>(name, whyNot); +{% else %} + return prim.CanApplyAPI<{{ cls.cppClassName }}>(whyNot); +{% endif %} +} + +/* static */ +{{ cls.cppClassName }} +{% if not cls.isMultipleApply %} +{{ cls.cppClassName }}::Apply(const UsdPrim &prim) +{% else %} +{{ cls.cppClassName }}::Apply(const UsdPrim &prim, const TfToken &name) +{% endif %} +{ +{% if cls.isMultipleApply %} + if (prim.ApplyAPI<{{ cls.cppClassName }}>(name)) { + return {{ cls.cppClassName }}(prim, name); + } +{% else %} + if (prim.ApplyAPI<{{ cls.cppClassName }}>()) { + return {{ cls.cppClassName }}(prim); + } +{% endif %} + return {{ cls.cppClassName }}(); +} +{% endif %} + +/* static */ +const TfType & +{{ cls.cppClassName }}::_GetStaticTfType() +{ + static TfType tfType = TfType::Find<{{ cls.cppClassName }}>(); + return tfType; +} + +/* static */ +bool +{{ cls.cppClassName }}::_IsTypedSchema() +{ + static bool isTyped = _GetStaticTfType().IsA(); + return isTyped; +} + +/* virtual */ +const TfType & +{{ cls.cppClassName }}::_GetTfType() const +{ + return _GetStaticTfType(); +} +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + +/// Returns the property name prefixed with the correct namespace prefix, which +/// is composed of the the API's propertyNamespacePrefix metadata and the +/// instance name of the API. +static inline +TfToken +_GetNamespacedPropertyName(const TfToken instanceName, const TfToken propName) +{ + return UsdSchemaRegistry::MakeMultipleApplyNameInstance(propName, instanceName); +} +{% endif %} + +{% for attrName in cls.attrOrder %} +{% set attr = cls.attrs[attrName] %} +{# Only emit Create/Get API and doxygen if apiName is not empty string. 
#} +{% if attr.apiName != '' %} +{% if attr.apiGet != "custom" %} +UsdAttribute +{{ cls.cppClassName }}::Get{{ Proper(attr.apiName) }}Attr() const +{ +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + return GetPrim().GetAttribute( + _GetNamespacedPropertyName( + GetName(), + {{ tokensPrefix }}Tokens->{{ attr.name }})); +{% else %} + return GetPrim().GetAttribute({{ tokensPrefix }}Tokens->{{ attr.name }}); +{% endif %} +} +{% endif %} + +UsdAttribute +{{ cls.cppClassName }}::Create{{ Proper(attr.apiName) }}Attr(VtValue const &defaultValue, bool writeSparsely) const +{ +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + return UsdSchemaBase::_CreateAttr( + _GetNamespacedPropertyName( + GetName(), + {{ tokensPrefix }}Tokens->{{ attr.name }}), +{% else %} + return UsdSchemaBase::_CreateAttr({{ tokensPrefix }}Tokens->{{ attr.name }}, +{% endif %} + {{ attr.usdType }}, + /* custom = */ {{ "true" if attr.custom else "false" }}, + {{ attr.variability }}, + defaultValue, + writeSparsely); +} + +{% endif %} +{% endfor %} +{% for relName in cls.relOrder %} +{% set rel = cls.rels[relName] %} +{# Only emit Create/Get API and doxygen if apiName is not empty string. #} +{% if rel.apiName != '' %} +{% if rel.apiGet != "custom" %} +UsdRelationship +{{ cls.cppClassName }}::Get{{ Proper(rel.apiName) }}Rel() const +{ +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + return GetPrim().GetRelationship( + _GetNamespacedPropertyName( + GetName(), + {{ tokensPrefix }}Tokens->{{ rel.name }})); +{% else %} + return GetPrim().GetRelationship({{ tokensPrefix }}Tokens->{{ rel.name }}); +{% endif %} +} +{% endif %} + +UsdRelationship +{{ cls.cppClassName }}::Create{{ Proper(rel.apiName) }}Rel() const +{ +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + return GetPrim().CreateRelationship( + _GetNamespacedPropertyName( + GetName(), + {{ tokensPrefix }}Tokens->{{ rel.name }}), +{% else %} + return GetPrim().CreateRelationship({{ tokensPrefix }}Tokens->{{rel.name}}, +{% endif %} + /* custom = */ {{ "true" if rel.custom else "false" }}); +} + +{% endif %} +{% endfor %} +{% if cls.attrOrder|length > 0 %} +namespace { +static inline TfTokenVector +_ConcatenateAttributeNames(const TfTokenVector& left,const TfTokenVector& right) +{ + TfTokenVector result; + result.reserve(left.size() + right.size()); + result.insert(result.end(), left.begin(), left.end()); + result.insert(result.end(), right.begin(), right.end()); + return result; +} +} + +{% endif %} +/*static*/ +const TfTokenVector& +{{ cls.cppClassName }}::GetSchemaAttributeNames(bool includeInherited) +{ +{% if cls.attrOrder|length > 0 %} + static TfTokenVector localNames = { +{% for attrName in cls.attrOrder %} +{% set attr = cls.attrs[attrName] %} +{% if attr.apiName != '' %} + {{ tokensPrefix }}Tokens->{{ attr.name }}, +{% endif %} +{% endfor %} + }; + static TfTokenVector allNames = + _ConcatenateAttributeNames( + {{ cls.parentCppClassName }}::GetSchemaAttributeNames(true), + localNames); +{% else %} + static TfTokenVector localNames; + static TfTokenVector allNames = + {{ cls.parentCppClassName }}::GetSchemaAttributeNames(true); +{% endif %} + + if (includeInherited) + return allNames; + else + return localNames; +} + +{% if cls.isMultipleApply %} +/*static*/ +TfTokenVector +{{ cls.cppClassName }}::GetSchemaAttributeNames( + bool includeInherited, const TfToken &instanceName) +{ + const TfTokenVector &attrNames = GetSchemaAttributeNames(includeInherited); + if (instanceName.IsEmpty()) { + return attrNames; + } + 
TfTokenVector result; + result.reserve(attrNames.size()); + for (const TfToken &attrName : attrNames) { + result.push_back( + UsdSchemaRegistry::MakeMultipleApplyNameInstance(attrName, instanceName)); + } + return result; +} + +{% endif %} +{% if useExportAPI %} +{{ namespaceClose }} + +{% endif %} +// ===================================================================== // +// Feel free to add custom code below this line. It will be preserved by +// the code generator. +{% if useExportAPI %} +// +// Just remember to wrap code in the appropriate delimiters: +// '{{ namespaceOpen }}', '{{ namespaceClose }}'. +{% endif %} +// ===================================================================== // +// --(BEGIN CUSTOM CODE)-- + diff --git a/blender/lib/usd/usd/resources/codegenTemplates/schemaClass.h b/blender/lib/usd/usd/resources/codegenTemplates/schemaClass.h new file mode 100644 index 0000000..2e1134d --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/schemaClass.h @@ -0,0 +1,450 @@ +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// +#ifndef {{ Upper(libraryName) }}_GENERATED_{{ Upper(cls.className) }}_H +#define {{ Upper(libraryName) }}_GENERATED_{{ Upper(cls.className) }}_H + +/// \file {{ libraryName }}/{{ cls.GetHeaderFile() }} + +{% if useExportAPI %} +#include "pxr/pxr.h" +#include "{{ libraryPath }}/api.h" +{% endif %} +#include "{{ cls.parentLibPath }}/{{ cls.GetParentHeaderFile() }}" +#include "pxr/usd/usd/prim.h" +#include "pxr/usd/usd/stage.h" +{% if cls.tokens -%} +#include "{{ libraryPath }}/tokens.h" +{% endif %} +{% if cls.extraIncludes -%} +{{ cls.extraIncludes }} +{% endif %} + +#include "pxr/base/vt/value.h" + +#include "pxr/base/gf/vec3d.h" +#include "pxr/base/gf/vec3f.h" +#include "pxr/base/gf/matrix4d.h" + +#include "pxr/base/tf/token.h" +#include "pxr/base/tf/type.h" + +{% if useExportAPI %} +{{ namespaceOpen }} + +{% endif %} +class SdfAssetPath; + +// -------------------------------------------------------------------------- // +// {{ Upper(cls.usdPrimTypeName) }}{{' ' * (74 - cls.usdPrimTypeName|count)}} // +// -------------------------------------------------------------------------- // + +/// \class {{ cls.cppClassName }} +/// +{% if cls.doc -%} +/// {{ cls.doc }} +{% endif %} +{% if cls.doc and hasTokenAttrs -%} +/// +{%endif%} +{% if hasTokenAttrs -%} +/// For any described attribute \em Fallback \em Value or \em Allowed \em Values below +/// that are text/tokens, the actual token is published and defined in \ref {{ tokensPrefix }}Tokens. +/// So to set an attribute to the value "rightHanded", use {{ tokensPrefix }}Tokens->rightHanded +/// as the value. +{% endif %} +/// +class {{ cls.cppClassName }} : public {{ cls.parentCppClassName }} +{ +public: + /// Compile time constant representing what kind of schema this class is. + /// + /// \sa UsdSchemaKind + static const UsdSchemaKind schemaKind = {{cls.schemaKindEnumValue }}; + +{% if cls.isMultipleApply %} + /// Construct a {{ cls.cppClassName }} on UsdPrim \p prim with + /// name \p name . Equivalent to + /// {{ cls.cppClassName }}::Get( + /// prim.GetStage(), + /// prim.GetPath().AppendProperty( + /// "{{ cls.propertyNamespacePrefix }}:name")); + /// + /// for a \em valid \p prim, but will not immediately throw an error for + /// an invalid \p prim + explicit {{ cls.cppClassName }}( + const UsdPrim& prim=UsdPrim(), const TfToken &name=TfToken()) + : {{ cls.parentCppClassName }}(prim, /*instanceName*/ name) + { } + + /// Construct a {{ cls.cppClassName }} on the prim held by \p schemaObj with + /// name \p name. Should be preferred over + /// {{ cls.cppClassName }}(schemaObj.GetPrim(), name), as it preserves + /// SchemaBase state. + explicit {{ cls.cppClassName }}( + const UsdSchemaBase& schemaObj, const TfToken &name) + : {{ cls.parentCppClassName }}(schemaObj, /*instanceName*/ name) + { } +{% else %} + /// Construct a {{ cls.cppClassName }} on UsdPrim \p prim . + /// Equivalent to {{ cls.cppClassName }}::Get(prim.GetStage(), prim.GetPath()) + /// for a \em valid \p prim, but will not immediately throw an error for + /// an invalid \p prim + explicit {{ cls.cppClassName }}(const UsdPrim& prim=UsdPrim()) + : {{ cls.parentCppClassName }}(prim) + { + } + + /// Construct a {{ cls.cppClassName }} on the prim held by \p schemaObj . + /// Should be preferred over {{ cls.cppClassName }}(schemaObj.GetPrim()), + /// as it preserves SchemaBase state. + explicit {{ cls.cppClassName }}(const UsdSchemaBase& schemaObj) + : {{ cls.parentCppClassName }}(schemaObj) + { + } +{% endif %} + + /// Destructor. 
+ {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + virtual ~{{ cls.cppClassName }}() {%- if cls.isAPISchemaBase %} = 0{% endif %}; + + /// Return a vector of names of all pre-declared attributes for this schema + /// class and all its ancestor classes. Does not include attributes that + /// may be authored by custom/extended methods of the schemas involved. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static const TfTokenVector & + GetSchemaAttributeNames(bool includeInherited=true); +{% if cls.isMultipleApply %} + + /// Return a vector of names of all pre-declared attributes for this schema + /// class and all its ancestor classes for a given instance name. Does not + /// include attributes that may be authored by custom/extended methods of + /// the schemas involved. The names returned will have the proper namespace + /// prefix. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static TfTokenVector + GetSchemaAttributeNames(bool includeInherited, const TfToken &instanceName); + + /// Returns the name of this multiple-apply schema instance + TfToken GetName() const { + return _GetInstanceName(); + } +{% endif %} +{% if not cls.isAPISchemaBase %} + + /// Return a {{ cls.cppClassName }} holding the prim adhering to this + /// schema at \p path on \p stage. If no prim exists at \p path on + /// \p stage, or if the prim at that path does not adhere to this schema, +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + /// return an invalid schema object. \p path must be of the format + /// .{{ cls.propertyNamespacePrefix }}:name . + /// + /// This is shorthand for the following: + /// + /// \code + /// TfToken name = SdfPath::StripNamespace(path.GetToken()); + /// {{ cls.cppClassName }}( + /// stage->GetPrimAtPath(path.GetPrimPath()), name); + /// \endcode +{% else %} + /// return an invalid schema object. This is shorthand for the following: + /// + /// \code + /// {{ cls.cppClassName }}(stage->GetPrimAtPath(path)); + /// \endcode +{% endif %} + /// + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static {{ cls.cppClassName }} + Get(const UsdStagePtr &stage, const SdfPath &path); +{% if cls.isMultipleApply %} + + /// Return a {{ cls.cppClassName }} with name \p name holding the + /// prim \p prim. Shorthand for {{ cls.cppClassName }}(prim, name); + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static {{ cls.cppClassName }} + Get(const UsdPrim &prim, const TfToken &name); + + /// Return a vector of all named instances of {{ cls.cppClassName }} on the + /// given \p prim. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static std::vector<{{ cls.cppClassName }}> + GetAll(const UsdPrim &prim); +{% endif %} +{% endif %} + +{% if cls.isConcrete %} + /// Attempt to ensure a \a UsdPrim adhering to this schema at \p path + /// is defined (according to UsdPrim::IsDefined()) on this stage. + /// + /// If a prim adhering to this schema at \p path is already defined on this + /// stage, return that prim. Otherwise author an \a SdfPrimSpec with + /// \a specifier == \a SdfSpecifierDef and this schema's prim type name for + /// the prim at \p path at the current EditTarget. Author \a SdfPrimSpec s + /// with \p specifier == \a SdfSpecifierDef and empty typeName at the + /// current EditTarget for any nonexistent, or existing but not \a Defined + /// ancestors. 
+ /// + /// The given \a path must be an absolute prim path that does not contain + /// any variant selections. + /// + /// If it is impossible to author any of the necessary PrimSpecs, (for + /// example, in case \a path cannot map to the current UsdEditTarget's + /// namespace) issue an error and return an invalid \a UsdPrim. + /// + /// Note that this method may return a defined prim whose typeName does not + /// specify this schema class, in case a stronger typeName opinion overrides + /// the opinion at the current EditTarget. + /// + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static {{ cls.cppClassName }} + Define(const UsdStagePtr &stage, const SdfPath &path); +{% endif %} +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + /// Checks if the given name \p baseName is the base name of a property + /// of {{ cls.usdPrimTypeName }}. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static bool + IsSchemaPropertyBaseName(const TfToken &baseName); + + /// Checks if the given path \p path is of an API schema of type + /// {{ cls.usdPrimTypeName }}. If so, it stores the instance name of + /// the schema in \p name and returns true. Otherwise, it returns false. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static bool + Is{{ cls.usdPrimTypeName }}Path(const SdfPath &path, TfToken *name); +{% endif %} +{% if cls.isAppliedAPISchema and not cls.isMultipleApply %} + + /// Returns true if this single-apply API schema can be applied to + /// the given \p prim. If this schema can not be a applied to the prim, + /// this returns false and, if provided, populates \p whyNot with the + /// reason it can not be applied. + /// + /// Note that if CanApply returns false, that does not necessarily imply + /// that calling Apply will fail. Callers are expected to call CanApply + /// before calling Apply if they want to ensure that it is valid to + /// apply a schema. + /// + /// \sa UsdPrim::GetAppliedSchemas() + /// \sa UsdPrim::HasAPI() + /// \sa UsdPrim::CanApplyAPI() + /// \sa UsdPrim::ApplyAPI() + /// \sa UsdPrim::RemoveAPI() + /// + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static bool + CanApply(const UsdPrim &prim, std::string *whyNot=nullptr); + + /// Applies this single-apply API schema to the given \p prim. + /// This information is stored by adding "{{ cls.primName }}" to the + /// token-valued, listOp metadata \em apiSchemas on the prim. + /// + /// \return A valid {{ cls.cppClassName }} object is returned upon success. + /// An invalid (or empty) {{ cls.cppClassName }} object is returned upon + /// failure. See \ref UsdPrim::ApplyAPI() for conditions + /// resulting in failure. + /// + /// \sa UsdPrim::GetAppliedSchemas() + /// \sa UsdPrim::HasAPI() + /// \sa UsdPrim::CanApplyAPI() + /// \sa UsdPrim::ApplyAPI() + /// \sa UsdPrim::RemoveAPI() + /// + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static {{ cls.cppClassName }} + Apply(const UsdPrim &prim); +{% endif %} +{% if cls.isAppliedAPISchema and cls.isMultipleApply %} + + /// Returns true if this multiple-apply API schema can be applied, + /// with the given instance name, \p name, to the given \p prim. If this + /// schema can not be a applied the prim, this returns false and, if + /// provided, populates \p whyNot with the reason it can not be applied. + /// + /// Note that if CanApply returns false, that does not necessarily imply + /// that calling Apply will fail. 
Callers are expected to call CanApply + /// before calling Apply if they want to ensure that it is valid to + /// apply a schema. + /// + /// \sa UsdPrim::GetAppliedSchemas() + /// \sa UsdPrim::HasAPI() + /// \sa UsdPrim::CanApplyAPI() + /// \sa UsdPrim::ApplyAPI() + /// \sa UsdPrim::RemoveAPI() + /// + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static bool + CanApply(const UsdPrim &prim, const TfToken &name, + std::string *whyNot=nullptr); + + /// Applies this multiple-apply API schema to the given \p prim + /// along with the given instance name, \p name. + /// + /// This information is stored by adding "{{ cls.primName }}:name" + /// to the token-valued, listOp metadata \em apiSchemas on the prim. + /// For example, if \p name is 'instance1', the token + /// '{{ cls.primName }}:instance1' is added to 'apiSchemas'. + /// + /// \return A valid {{ cls.cppClassName }} object is returned upon success. + /// An invalid (or empty) {{ cls.cppClassName }} object is returned upon + /// failure. See \ref UsdPrim::ApplyAPI() for + /// conditions resulting in failure. + /// + /// \sa UsdPrim::GetAppliedSchemas() + /// \sa UsdPrim::HasAPI() + /// \sa UsdPrim::CanApplyAPI() + /// \sa UsdPrim::ApplyAPI() + /// \sa UsdPrim::RemoveAPI() + /// + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static {{ cls.cppClassName }} + Apply(const UsdPrim &prim, const TfToken &name); +{% endif %} + +protected: + /// Returns the kind of schema this class belongs to. + /// + /// \sa UsdSchemaKind + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + UsdSchemaKind _GetSchemaKind() const override; + +private: + // needs to invoke _GetStaticTfType. + friend class UsdSchemaRegistry; + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + static const TfType &_GetStaticTfType(); + + static bool _IsTypedSchema(); + + // override SchemaBase virtuals. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + const TfType &_GetTfType() const override; + +{% for attrName in cls.attrOrder %} +{% set attr = cls.attrs[attrName]%} +{# Only emit Create/Get API and doxygen if apiName is not empty string. #} +{% if attr.apiName != '' %} +public: + // --------------------------------------------------------------------- // + // {{ Upper(attr.apiName) }} + // --------------------------------------------------------------------- // + /// {{ attr.doc }} + /// +{% if attr.details %} + /// | || + /// | -- | -- | +{% for detail in attr.details %} + /// | {{ detail[0] }} | {{ detail[1] }} | +{% endfor %} +{% endif %} + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + UsdAttribute Get{{ Proper(attr.apiName) }}Attr() const; + + /// See Get{{ Proper(attr.apiName) }}Attr(), and also + /// \ref Usd_Create_Or_Get_Property for when to use Get vs Create. + /// If specified, author \p defaultValue as the attribute's default, + /// sparsely (when it makes sense to do so) if \p writeSparsely is \c true - + /// the default for \p writeSparsely is \c false. + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + UsdAttribute Create{{ Proper(attr.apiName) }}Attr(VtValue const &defaultValue = VtValue(), bool writeSparsely=false) const; + +{% endif %} +{% endfor %} +{% for relName in cls.relOrder %} +{% set rel = cls.rels[relName]%} +{# Only emit Create/Get API and doxygen if apiName is not empty string. 
#} +{% if rel.apiName != '' %} +public: + // --------------------------------------------------------------------- // + // {{ Upper(rel.apiName) }} + // --------------------------------------------------------------------- // + /// {{ rel.doc }} + /// +{% for detail in rel.details %} + /// \n {{ detail[0] }}: {{ detail[1] }} +{% endfor %} + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + UsdRelationship Get{{ Proper(rel.apiName) }}Rel() const; + + /// See Get{{ Proper(rel.apiName) }}Rel(), and also + /// \ref Usd_Create_Or_Get_Property for when to use Get vs Create + {% if useExportAPI -%} + {{ Upper(libraryName) }}_API + {% endif -%} + UsdRelationship Create{{ Proper(rel.apiName) }}Rel() const; +{% endif %} + +{% endfor %} +public: + // ===================================================================== // + // Feel free to add custom code below this line, it will be preserved by + // the code generator. + // + // Just remember to: + // - Close the class declaration with }; +{% if useExportAPI %} + // - Close the namespace with {{ namespaceClose }} +{% endif %} + // - Close the include guard with #endif + // ===================================================================== // + // --(BEGIN CUSTOM CODE)-- + diff --git a/blender/lib/usd/usd/resources/codegenTemplates/tokens.cpp b/blender/lib/usd/usd/resources/codegenTemplates/tokens.cpp new file mode 100644 index 0000000..89f63a5 --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/tokens.cpp @@ -0,0 +1,47 @@ +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. 
+// +#include "{{ libraryPath }}/tokens.h" + +{% if useExportAPI %} +{{ namespaceOpen }} + +{% endif %} +{{ tokensPrefix }}TokensType::{{ tokensPrefix }}TokensType() : +{% for token in tokens %} + {{ token.id }}("{{ token.value }}", TfToken::Immortal), +{% endfor %} + allTokens({ +{% for token in tokens %} + {{ token.id }}{% if not loop.last %},{% endif %} + +{% endfor %} + }) +{ +} + +TfStaticData<{{ tokensPrefix }}TokensType> {{ tokensPrefix }}Tokens; +{% if useExportAPI %} + +{{ namespaceClose }} +{% endif %} diff --git a/blender/lib/usd/usd/resources/codegenTemplates/tokens.h b/blender/lib/usd/usd/resources/codegenTemplates/tokens.h new file mode 100644 index 0000000..732d0bd --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/tokens.h @@ -0,0 +1,91 @@ +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// +#ifndef {{ Upper(tokensPrefix) }}_TOKENS_H +#define {{ Upper(tokensPrefix) }}_TOKENS_H + +/// \file {{ libraryName }}/tokens.h + +// XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX +// +// This is an automatically generated file (by usdGenSchema.py). +// Do not hand-edit! +// +// XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX + +{% if useExportAPI %} +#include "pxr/pxr.h" +#include "{{ libraryPath }}/api.h" +{% endif %} +#include "pxr/base/tf/staticData.h" +#include "pxr/base/tf/token.h" +#include + +{% if useExportAPI %} +{{ namespaceOpen }} + +{% endif %} + +/// \class {{ tokensPrefix }}TokensType +/// +/// \link {{ tokensPrefix }}Tokens \endlink provides static, efficient +/// \link TfToken TfTokens\endlink for use in all public USD API. +/// +/// These tokens are auto-generated from the module's schema, representing +/// property names, for when you need to fetch an attribute or relationship +/// directly by name, e.g. UsdPrim::GetAttribute(), in the most efficient +/// manner, and allow the compiler to verify that you spelled the name +/// correctly. +/// +/// {{ tokensPrefix }}Tokens also contains all of the \em allowedTokens values +/// declared for schema builtin attributes of 'token' scene description type. 
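Illustrative sketch of how a generated {{ tokensPrefix }}Tokens struct is consumed in practice. This is an editorial aside, not part of the template above; it assumes the usdGeom library (whose UsdGeomTokens struct is produced by this same mechanism) and an available USD build:

#include "pxr/usd/usd/attribute.h"
#include "pxr/usd/usd/stage.h"
#include "pxr/usd/usdGeom/sphere.h"
#include "pxr/usd/usdGeom/tokens.h"

PXR_NAMESPACE_USING_DIRECTIVE

int main()
{
    UsdStageRefPtr stage = UsdStage::CreateInMemory();
    UsdGeomSphere sphere = UsdGeomSphere::Define(stage, SdfPath("/ball"));

    // Fetch the attribute by its pre-built token rather than a raw string;
    // a misspelled token member fails to compile instead of silently missing.
    UsdAttribute radius = sphere.GetPrim().GetAttribute(UsdGeomTokens->radius);
    radius.Set(2.0);
    return 0;
}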
+{% if tokens %} +/// Use {{ tokensPrefix }}Tokens like so: +/// +/// \code +/// gprim.GetMyTokenValuedAttr().Set({{ tokensPrefix }}Tokens->{{ tokens[0].id }}); +/// \endcode +{% endif %} +struct {{ tokensPrefix }}TokensType { + {% if useExportAPI %}{{ Upper(libraryName) }}_API {% endif %}{{ tokensPrefix }}TokensType(); +{% for token in tokens %} + /// \brief "{{ token.value }}" + /// + /// {{ token.desc }} + const TfToken {{ token.id }}; +{% endfor %} + /// A vector of all of the tokens listed above. + const std::vector allTokens; +}; + +/// \var {{ tokensPrefix }}Tokens +/// +/// A global variable with static, efficient \link TfToken TfTokens\endlink +/// for use in all public USD API. \sa {{ tokensPrefix }}TokensType +extern{% if useExportAPI %} {{ Upper(libraryName) }}_API{% endif %} TfStaticData<{{ tokensPrefix }}TokensType> {{ tokensPrefix }}Tokens; +{% if useExportAPI %} + +{{ namespaceClose }} +{% endif %} + +#endif diff --git a/blender/lib/usd/usd/resources/codegenTemplates/wrapSchemaClass.cpp b/blender/lib/usd/usd/resources/codegenTemplates/wrapSchemaClass.cpp new file mode 100644 index 0000000..1075d2c --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/wrapSchemaClass.cpp @@ -0,0 +1,289 @@ +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// +#include "{{ libraryPath }}/{{ cls.GetHeaderFile() }}" +#include "pxr/usd/usd/schemaBase.h" + +#include "pxr/usd/sdf/primSpec.h" + +#include "pxr/usd/usd/pyConversions.h" +{% if cls.isAppliedAPISchema %} +#include "pxr/base/tf/pyAnnotatedBoolResult.h" +{% endif %} +#include "pxr/base/tf/pyContainerConversions.h" +#include "pxr/base/tf/pyResultConversions.h" +#include "pxr/base/tf/pyUtils.h" +#include "pxr/base/tf/wrapTypeHelpers.h" + +#include + +#include + +using namespace boost::python; + +{% if useExportAPI %} +{{ namespaceUsing }} + +namespace { + +{% endif %} +#define WRAP_CUSTOM \ + template static void _CustomWrapCode(Cls &_class) + +// fwd decl. +WRAP_CUSTOM; + +{% for attrName in cls.attrOrder -%} +{% set attr = cls.attrs[attrName] %} +{# Only emit Create/Get API if apiName is not empty string. 
#} +{% if attr.apiName != '' %} + +static UsdAttribute +_Create{{ Proper(attr.apiName) }}Attr({{ cls.cppClassName }} &self, + object defaultVal, bool writeSparsely) { + return self.Create{{ Proper(attr.apiName) }}Attr( + UsdPythonToSdfType(defaultVal, {{ attr.usdType }}), writeSparsely); +} +{% endif %} +{% endfor %} +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + +static bool _WrapIs{{ cls.usdPrimTypeName }}Path(const SdfPath &path) { + TfToken collectionName; + return {{ cls.cppClassName }}::Is{{ cls.usdPrimTypeName }}Path( + path, &collectionName); +} +{% endif %} +{% if not cls.isAPISchemaBase %} + +static std::string +_Repr(const {{ cls.cppClassName }} &self) +{ + std::string primRepr = TfPyRepr(self.GetPrim()); +{% if cls.isMultipleApply %} + std::string instanceName = self.GetName(); + return TfStringPrintf( + "{{ libraryName[0]|upper }}{{ libraryName[1:] }}.{{ cls.className }}(%s, '%s')", + primRepr.c_str(), instanceName.c_str()); +{% else %} + return TfStringPrintf( + "{{ libraryName[0]|upper }}{{ libraryName[1:] }}.{{ cls.className }}(%s)", + primRepr.c_str()); +{% endif %} +} +{% endif %} +{% if cls.isAppliedAPISchema %} + +struct {{ cls.cppClassName }}_CanApplyResult : + public TfPyAnnotatedBoolResult +{ + {{ cls.cppClassName }}_CanApplyResult(bool val, std::string const &msg) : + TfPyAnnotatedBoolResult(val, msg) {} +}; + +{% if cls.isMultipleApply %} +static {{ cls.cppClassName }}_CanApplyResult +_WrapCanApply(const UsdPrim& prim, const TfToken& name) +{ + std::string whyNot; + bool result = {{ cls.cppClassName }}::CanApply(prim, name, &whyNot); + return {{ cls.cppClassName }}_CanApplyResult(result, whyNot); +} +{% else %} +static {{ cls.cppClassName }}_CanApplyResult +_WrapCanApply(const UsdPrim& prim) +{ + std::string whyNot; + bool result = {{ cls.cppClassName }}::CanApply(prim, &whyNot); + return {{ cls.cppClassName }}_CanApplyResult(result, whyNot); +} +{% endif %} +{% endif %} +{% if useExportAPI %} + +} // anonymous namespace +{% endif %} + +void wrap{{ cls.cppClassName }}() +{ + typedef {{ cls.cppClassName }} This; + +{% if cls.isAppliedAPISchema %} + {{ cls.cppClassName }}_CanApplyResult::Wrap<{{ cls.cppClassName }}_CanApplyResult>( + "_CanApplyResult", "whyNot"); + +{% endif %} +{% if cls.isAPISchemaBase %} + class_< This , bases<{{ cls.parentCppClassName }}>, boost::noncopyable> cls ("APISchemaBase", "", no_init); +{% else %} + class_ > + cls("{{ cls.className }}"); +{% endif %} + + cls +{% if not cls.isAPISchemaBase %} +{% if cls.isMultipleApply %} + .def(init()) + .def(init()) +{% else %} + .def(init(arg("prim"))) + .def(init(arg("schemaObj"))) +{% endif %} +{% endif %} + .def(TfTypePythonClass()) + +{% if not cls.isAPISchemaBase %} +{% if cls.isMultipleApply %} + .def("Get", + ({{ cls.cppClassName }}(*)(const UsdStagePtr &stage, + const SdfPath &path)) + &This::Get, + (arg("stage"), arg("path"))) + .def("Get", + ({{ cls.cppClassName }}(*)(const UsdPrim &prim, + const TfToken &name)) + &This::Get, + (arg("prim"), arg("name"))) +{% else %} + .def("Get", &This::Get, (arg("stage"), arg("path"))) +{% endif %} + .staticmethod("Get") +{% endif %} +{% if cls.isMultipleApply %} + + .def("GetAll", + (std::vector<{{ cls.cppClassName }}>(*)(const UsdPrim &prim)) + &This::GetAll, + arg("prim"), + return_value_policy()) + .staticmethod("GetAll") +{% endif %} +{% if cls.isConcrete %} + + .def("Define", &This::Define, (arg("stage"), arg("path"))) + .staticmethod("Define") +{% endif %} +{% if cls.isAppliedAPISchema and not cls.isMultipleApply %} + + 
.def("CanApply", &_WrapCanApply, (arg("prim"))) + .staticmethod("CanApply") +{% endif %} +{% if cls.isAppliedAPISchema and cls.isMultipleApply %} + + .def("CanApply", &_WrapCanApply, (arg("prim"), arg("name"))) + .staticmethod("CanApply") +{% endif %} +{% if cls.isAppliedAPISchema and not cls.isMultipleApply %} + + .def("Apply", &This::Apply, (arg("prim"))) + .staticmethod("Apply") +{% endif %} +{% if cls.isAppliedAPISchema and cls.isMultipleApply %} + + .def("Apply", &This::Apply, (arg("prim"), arg("name"))) + .staticmethod("Apply") +{% endif %} + +{% if cls.isMultipleApply %} + .def("GetSchemaAttributeNames", + (const TfTokenVector &(*)(bool))&This::GetSchemaAttributeNames, + arg("includeInherited")=true, + return_value_policy()) + .def("GetSchemaAttributeNames", + (TfTokenVector(*)(bool, const TfToken &)) + &This::GetSchemaAttributeNames, + arg("includeInherited"), + arg("instanceName"), + return_value_policy()) +{% else %} + .def("GetSchemaAttributeNames", + &This::GetSchemaAttributeNames, + arg("includeInherited")=true, + return_value_policy()) +{% endif %} + .staticmethod("GetSchemaAttributeNames") + + .def("_GetStaticTfType", (TfType const &(*)()) TfType::Find, + return_value_policy()) + .staticmethod("_GetStaticTfType") + + .def(!self) + +{% for attrName in cls.attrOrder -%} +{% set attr = cls.attrs[attrName] %} +{# Only emit Create/Get API if apiName is not empty string. #} +{% if attr.apiName != '' %} + + .def("Get{{ Proper(attr.apiName) }}Attr", + &This::Get{{ Proper(attr.apiName) }}Attr) + .def("Create{{ Proper(attr.apiName) }}Attr", + &_Create{{ Proper(attr.apiName) }}Attr, + (arg("defaultValue")=object(), + arg("writeSparsely")=false)) +{% endif %} +{% endfor %} + +{% for relName in cls.relOrder -%} +{# Only emit Create/Get API and doxygen if apiName is not empty string. #} +{% set rel = cls.rels[relName] %} +{% if rel.apiName != '' %} + + .def("Get{{ Proper(rel.apiName) }}Rel", + &This::Get{{ Proper(rel.apiName) }}Rel) + .def("Create{{ Proper(rel.apiName) }}Rel", + &This::Create{{ Proper(rel.apiName) }}Rel) +{% endif %} +{% endfor %} +{% if cls.isMultipleApply and cls.propertyNamespacePrefix %} + .def("Is{{ cls.usdPrimTypeName }}Path", _WrapIs{{ cls.usdPrimTypeName }}Path) + .staticmethod("Is{{ cls.usdPrimTypeName }}Path") +{% endif %} +{% if not cls.isAPISchemaBase %} + .def("__repr__", ::_Repr) +{% endif %} + ; + + _CustomWrapCode(cls); +} + +// ===================================================================== // +// Feel free to add custom code below this line, it will be preserved by +// the code generator. The entry point for your custom code should look +// minimally like the following: +// +// WRAP_CUSTOM { +// _class +// .def("MyCustomMethod", ...) +// ; +// } +// +// Of course any other ancillary or support code may be provided. +{% if useExportAPI %} +// +// Just remember to wrap code in the appropriate delimiters: +// 'namespace {', '}'. 
+// +{% endif %} +// ===================================================================== // +// --(BEGIN CUSTOM CODE)-- + diff --git a/blender/lib/usd/usd/resources/codegenTemplates/wrapTokens.cpp b/blender/lib/usd/usd/resources/codegenTemplates/wrapTokens.cpp new file mode 100644 index 0000000..7d54d47 --- /dev/null +++ b/blender/lib/usd/usd/resources/codegenTemplates/wrapTokens.cpp @@ -0,0 +1,72 @@ +// +// Copyright 2016 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// +// GENERATED FILE. DO NOT EDIT. +#include +#include "{{ libraryPath }}/tokens.h" + +{% if useExportAPI %} +{{ namespaceUsing }} + +{% endif %} +namespace { + +// Helper to return a static token as a string. We wrap tokens as Python +// strings and for some reason simply wrapping the token using def_readonly +// bypasses to-Python conversion, leading to the error that there's no +// Python type for the C++ TfToken type. So we wrap this functor instead. +class _WrapStaticToken { +public: + _WrapStaticToken(const TfToken* token) : _token(token) { } + + std::string operator()() const + { + return _token->GetString(); + } + +private: + const TfToken* _token; +}; + +template +void +_AddToken(T& cls, const char* name, const TfToken& token) +{ + cls.add_static_property(name, + boost::python::make_function( + _WrapStaticToken(&token), + boost::python::return_value_policy< + boost::python::return_by_value>(), + boost::mpl::vector1())); +} + +} // anonymous + +void wrap{{ tokensPrefix }}Tokens() +{ + boost::python::class_<{{ tokensPrefix }}TokensType, boost::noncopyable> + cls("Tokens", boost::python::no_init); +{% for token in tokens %} + _AddToken(cls, "{{ token.id }}", {{ tokensPrefix }}Tokens->{{ token.id }}); +{% endfor %} +} diff --git a/blender/lib/usd/usd/resources/generatedSchema.usda b/blender/lib/usd/usd/resources/generatedSchema.usda new file mode 100644 index 0000000..82cbe2a --- /dev/null +++ b/blender/lib/usd/usd/resources/generatedSchema.usda @@ -0,0 +1,295 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class "Typed" ( + doc = '''The base class for all typed schemas (those that can impart a + typeName to a UsdPrim), and therefore the base class for all + concrete, instantiable "IsA" schemas. + + UsdTyped implements a typeName-based query for its override of + UsdSchemaBase::_IsCompatible(). It provides no other behavior.''' +) +{ +} + +class "APISchemaBase" ( + doc = """The base class for all API schemas. 
+ + An API schema provides an interface to a prim's qualities, but does not + specify a typeName for the underlying prim. The prim's qualities include + its inheritance structure, attributes, relationships etc. Since it cannot + provide a typeName, an API schema is considered to be non-concrete. + + To auto-generate an API schema using usdGenSchema, simply leave the + typeName empty and make it inherit from \"/APISchemaBase\" or from another + API schema. See UsdModelAPI, UsdClipsAPI and UsdCollectionAPI for examples. + + API schemas are classified into applied and non-applied API schemas. + The author of an API schema has to decide on the type of API schema + at the time of its creation by setting customData['apiSchemaType'] in the + schema definition (i.e. in the associated primSpec inside the schema.usda + file). UsdAPISchemaBase implements methods that are used to record the + application of an API schema on a USD prim. + + If an API schema only provides an interface to set certain core bits of + metadata (like UsdModelAPI, which sets model kind and UsdClipsAPI, which + sets clips-related metadata) OR if the API schema can apply to any type of + prim or only to a known fixed set of prim types OR if there is no use of + recording the application of the API schema, in such cases, it would be + better to make it a non-applied API schema. Examples of non-applied API + schemas include UsdModelAPI, UsdClipsAPI, UsdShadeConnectableAPI and + UsdGeomPrimvarsAPI. + + If there is a need to discover (or record) whether a prim contains or + subscribes to a given API schema, it would be advantageous to make the API + schema be \"applied\". In general, API schemas that add one or more properties + to a prim should be tagged as applied API schemas. A public Apply() method + is generated for applied API schemas by usdGenSchema. An applied API schema + must be applied to a prim via a call to the generated Apply() method, for + the schema object to evaluate to true when converted to a bool using the + explicit bool conversion operator. Examples of applied API schemas include + UsdCollectionAPI, UsdGeomModelAPI and UsdGeomMotionAPI + + \\anchor UsdAPISchemaBase_SingleVsMultipleApply + \\name Single vs. Multiple Apply API Schemas + + Applied API schemas can further be classified into single-apply and + multiple-apply API schemas. As the name suggests, a single-apply API schema + can only be applied once to a prim. A multiple-apply API schema can be + applied multiple times with different 'instanceName' values. An example of + a multiple-apply API schema is UsdCollectionAPI, where the API schema is + applied to a prim once for every collection owned by the prim. + + \\note An applied API schema can only inherit from another applied API + schema or directly from APISchemaBase. Similarly, a non-applied API schema + can only inherit from a non-applied API Schema or directly from + APISchemaBase. 'usdGenSchema' attempts to issue a warning if it detects + an incompatibility. + + \\note A multiple-apply API schema may not inherit from a single-apply API + schema and vice versa. + + \\note When the bool-conversion operator is invoked on an applied API + schema, it evaluates to true only if the application of the API schema has + been recorded on the prim via a call to the auto-generated Apply() method. + + """ +) +{ +} + +class "ModelAPI" ( + doc = """UsdModelAPI is an API schema that provides an interface to a prim's + model qualities, if it does, in fact, represent the root prim of a model. 
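To make the applied versus non-applied distinction described above concrete, the following minimal sketch (not part of the generated schema file) applies one single-apply and one multiple-apply API schema that ship with USD; UsdGeomMotionAPI and UsdCollectionAPI stand in for any schema generated by usdGenSchema, and the prim path and instance names are made up:

#include <string>

#include "pxr/usd/usd/collectionAPI.h"
#include "pxr/usd/usd/stage.h"
#include "pxr/usd/usdGeom/motionAPI.h"

PXR_NAMESPACE_USING_DIRECTIVE

int main()
{
    UsdStageRefPtr stage = UsdStage::CreateInMemory();
    UsdPrim prim = stage->DefinePrim(SdfPath("/World"), TfToken("Xform"));

    // Single-apply: recorded once in the prim's apiSchemas listOp metadata.
    std::string whyNot;
    if (UsdGeomMotionAPI::CanApply(prim, &whyNot)) {
        UsdGeomMotionAPI::Apply(prim);
    }

    // Multiple-apply: each application carries its own instance name.
    UsdCollectionAPI lights = UsdCollectionAPI::Apply(prim, TfToken("lights"));
    UsdCollectionAPI props = UsdCollectionAPI::Apply(prim, TfToken("props"));

    // The explicit bool conversion is true only because Apply() was recorded.
    return (lights && props) ? 0 : 1;
}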
+ + The first and foremost model quality is its kind, i.e. the metadata + that establishes it as a model (See KindRegistry). UsdModelAPI provides + various methods for setting and querying the prim's kind, as well as + queries (also available on UsdPrim) for asking what category of model + the prim is. See \"Kind and Model-ness\". + + UsdModelAPI also provides access to a prim's \"assetInfo\" + data. While any prim can host assetInfo, it is common that published + (referenced) assets are packaged as models, therefore it is convenient + to provide access to the one from the other. + + \\todo establish an _IsCompatible() override that returns IsModel() + \\todo GetModelInstanceName() + """ +) +{ +} + +class "CollectionAPI" ( + doc = """ This is a general purpose API schema, used to describe a + collection of heterogeneous objects within the scene. \"Objects\" here may be + prims or properties belonging to prims or other collections. It's an add-on + schema that can be applied many times to a prim with different collection + names. + + A collection allows an enumeration of a set of paths to include and a + set of paths to exclude. Whether the descendants of an included + path are members of a collection are decided by its expansion rule + (see below). If the collection excludes paths that are not descendents + of included paths, the collection implicitly includes the root path + </>. If such a collection also includes paths that are not + descendants of the excluded paths, it is considered invalid, since + the intention is ambiguous. + + All the properties authored by the schema are namespaced under + \"collection:\". The given name of the collection provides additional + namespacing for the various per-collection properties, which include the + following: + +
  • uniform token collection:collectionName:expansionRule - + specifies how the paths that are included in the collection must be expanded + to determine its members. Possible values include: +
      +
    • explicitOnly - only paths in the includes rel targets and not + in the excludes rel targets belong to the collection. +
    • expandPrims - all the prims at or below the includes rel- + targets (and not under the excludes rel-targets) belong to the + collection. Any property paths included in the collection would, of + course, also be honored. This is the default behavior as it satisfies + most use cases. +
    • expandPrimsAndProperties - like expandPrims, but also + includes all properties on all matched prims. We're still not quite + sure what the use cases are for this, but you can use it to capture a + whole lot of UsdObjects very concisely. +
    +
  • bool collection:collectionName:includeRoot - boolean + attribute indicating whether the pseudo-root path </> should + be counted as one of the included target paths. The fallback is false. + This separate attribute is required because relationships cannot + directly target the root. When expansionRule is explicitOnly, this + attribute is ignored. +
  • rel collection:collectionName:includes - specifies a list + of targets that are included in the collection. This can target prims or + properties directly. A collection can insert the rules of another + collection by making its includes relationship target the + collection:{collectionName} property on the owning prim of the + collection to be included (see UsdCollectionAPI::GetCollectionAttr). + It is important to note that including another collection does not + guarantee the contents of that collection will be in the final collection; + instead, the rules are merged. This means, for example, an exclude + entry may exclude a portion of the included collection. + When a collection includes one or more collections, the order in which + targets are added to the includes relationship may become significant, if + there are conflicting opinions about the same path. Targets that are added + later are considered to be stronger than earlier targets for the same path. +
  • rel collection:collectionName:excludes - specifies a list + of targets that are excluded below the included paths in this + collection. This can target prims or properties directly, but cannot + target another collection. This is to keep the membership determining + logic simple, efficient and easier to reason about. Finally, it is invalid + for a collection to exclude paths that are not included in it. The presence + of such \"orphaned\" excluded paths will not affect the set of paths included + in the collection, but may affect the performance of querying membership of + a path in the collection (see UsdCollectionAPI::MembershipQuery::IsPathIncluded) + or of enumerating the objects belonging to the collection (see + UsdCollectionAPI::GetIncludedObjects). +
  • uniform opaque collection:collectionName - opaque + attribute (meaning it can never have a value) that represents the collection + for the purpose of allowing another collection to include it. When this + property is targeted by another collection's includes relationship, + the rules of this collection will be inserted into the rules of the collection + that includes it. +
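A short sketch of authoring the properties listed above through the generated UsdCollectionAPI class. This is an illustration only, not part of the generated schema file; the prim paths and the collection name "lights" are made up:

#include "pxr/base/vt/value.h"
#include "pxr/usd/usd/attribute.h"
#include "pxr/usd/usd/collectionAPI.h"
#include "pxr/usd/usd/relationship.h"
#include "pxr/usd/usd/stage.h"
#include "pxr/usd/usd/tokens.h"

PXR_NAMESPACE_USING_DIRECTIVE

int main()
{
    UsdStageRefPtr stage = UsdStage::CreateInMemory();
    stage->DefinePrim(SdfPath("/World/Lights/Key"), TfToken("Xform"));
    stage->DefinePrim(SdfPath("/World/Lights/Fill"), TfToken("Xform"));

    UsdPrim world = stage->GetPrimAtPath(SdfPath("/World"));
    UsdCollectionAPI lights = UsdCollectionAPI::Apply(world, TfToken("lights"));

    // Authors collection:lights:includes, collection:lights:excludes and
    // collection:lights:expansionRule as described in the list above.
    lights.CreateIncludesRel().AddTarget(SdfPath("/World/Lights"));
    lights.CreateExcludesRel().AddTarget(SdfPath("/World/Lights/Fill"));
    lights.CreateExpansionRuleAttr(VtValue(UsdTokens->expandPrims));
    return 0;
}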
+ + Implicit inclusion + + In some scenarios it is useful to express a collection that includes + everything except certain paths. To support this, a collection + that has an exclude that is not a descendent of any include + will include the root path </>. + + Creating collections in C++ + + \\snippet examples.cpp ApplyCollections + """ +) +{ + uniform opaque collection:__INSTANCE_NAME__ ( + doc = """This property represents the collection for the purpose of + allowing another collection to include it. When this property is + targeted by another collection's includes relationship, the rules + of this collection will be inserted into the rules of the collection + that includes it. + """ + ) + rel collection:__INSTANCE_NAME__:excludes ( + doc = '''Specifies a list of targets that are excluded below + the included paths in this collection. This can target prims or + properties directly, but cannot target another collection. This is to + keep the membership determining logic simple, efficient and easier to + reason about. Finally, it is invalid for a collection to exclude + paths that are not included in it. The presence of such "orphaned" + excluded paths will not affect the set of paths included in the + collection, but may affect the performance of querying membership of + a path in the collection (see + UsdCollectionAPI::MembershipQuery::IsPathIncluded) + or of enumerating the objects belonging to the collection (see + UsdCollectionAPI::GetIncludedObjects).''' + ) + uniform token collection:__INSTANCE_NAME__:expansionRule = "expandPrims" ( + allowedTokens = ["explicitOnly", "expandPrims", "expandPrimsAndProperties"] + doc = """Specifies how the paths that are included in + the collection must be expanded to determine its members.""" + ) + uniform bool collection:__INSTANCE_NAME__:includeRoot ( + doc = """Boolean attribute indicating whether the pseudo-root + path </> should be counted as one of the included target + paths. The fallback is false. This separate attribute is + required because relationships cannot directly target the root.""" + ) + rel collection:__INSTANCE_NAME__:includes ( + doc = """Specifies a list of targets that are included in the collection. + This can target prims or properties directly. A collection can insert + the rules of another collection by making its includes + relationship target the collection:{collectionName} property on + the owning prim of the collection to be included""" + ) +} + +class "ClipsAPI" ( + doc = """ UsdClipsAPI is an API schema that provides an interface to + a prim's clip metadata. Clips are a \"value resolution\" feature that + allows one to specify a sequence of usd files (clips) to be consulted, + over time, as a source of varying overrides for the prims at and + beneath this prim in namespace. + + SetClipAssetPaths() establishes the set of clips that can be consulted. + SetClipActive() specifies the ordering of clip application over time + (clips can be repeated), while SetClipTimes() specifies time-mapping + from stage-time to clip-time for the clip active at a given stage-time, + which allows for time-dilation and repetition of clips. + Finally, SetClipPrimPath() determines the path within each clip that will + map to this prim, i.e. the location within the clip at which we will look + for opinions for this prim. + + The clip asset paths, times and active metadata can also be specified + through template clip metadata. This can be desirable when your set of + assets is very large, as the template metadata is much more concise. 
+ SetClipTemplateAssetPath() establishes the asset identifier pattern of the + set of clips to be consulted. SetClipTemplateStride(), + SetClipTemplateEndTime(), and SetClipTemplateStartTime() specify the range + in which USD will search, based on the template. From the set of resolved + asset paths, times and active will be derived internally. + + A prim may have multiple \"clip sets\" -- named sets of clips that each + have their own values for the metadata described above. For example, + a prim might have a clip set named \"Clips_1\" that specifies some group + of clip asset paths, and another clip set named \"Clips_2\" that uses + an entirely different set of clip asset paths. These clip sets are + composed across composition arcs, so clip sets for a prim may be + defined in multiple sublayers or references, for example. Individual + metadata for a given clip set may be sparsely overridden. + + Important facts about clips: + - Within the layerstack in which clips are established, the + opinions within the clips will be weaker than any local opinions + in the layerstack, but em stronger than varying opinions coming across + references and variants. + - We will never look for metadata or default opinions in clips + when performing value resolution on the owning stage, since these + quantities must be time-invariant. + + This leads to the common structure in which we reference a model asset + on a prim, and then author clips at the same site: the asset reference + will provide the topology and unvarying data for the model, while the + clips will provide the time-sampled animation. + + For further information, see \\ref Usd_Page_ValueClips + """ +) +{ +} + diff --git a/blender/lib/usd/usd/resources/plugInfo.json b/blender/lib/usd/usd/resources/plugInfo.json new file mode 100644 index 0000000..333546d --- /dev/null +++ b/blender/lib/usd/usd/resources/plugInfo.json @@ -0,0 +1,156 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. 
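The clip-metadata setters named in the UsdClipsAPI documentation above can be exercised as follows. This is a sketch only, with made-up clip file names and a made-up prim path; it authors clip metadata for the default clip set:

#include <string>

#include "pxr/base/gf/vec2d.h"
#include "pxr/base/vt/types.h"
#include "pxr/usd/sdf/assetPath.h"
#include "pxr/usd/usd/clipsAPI.h"
#include "pxr/usd/usd/stage.h"

PXR_NAMESPACE_USING_DIRECTIVE

int main()
{
    UsdStageRefPtr stage = UsdStage::CreateInMemory();
    UsdPrim prim = stage->DefinePrim(SdfPath("/Model"), TfToken("Xform"));
    UsdClipsAPI clips(prim);

    // Two clips to consult, and where inside each clip to look for /Model.
    VtArray<SdfAssetPath> assets;
    assets.push_back(SdfAssetPath("./clip1.usd"));
    assets.push_back(SdfAssetPath("./clip2.usd"));
    clips.SetClipAssetPaths(assets);
    clips.SetClipPrimPath("/Model");

    // At stage time 0 use clip 0; at stage time 10 switch to clip 1.
    VtVec2dArray active;
    active.push_back(GfVec2d(0.0, 0.0));
    active.push_back(GfVec2d(10.0, 1.0));
    clips.SetClipActive(active);

    // Map stage time to clip time so each clip starts playing from its time 0.
    VtVec2dArray times;
    times.push_back(GfVec2d(0.0, 0.0));
    times.push_back(GfVec2d(10.0, 0.0));
    clips.SetClipTimes(times);
    return 0;
}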
+{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "apiSchemas": { + "appliesTo": "prims", + "type": "tokenlistop" + }, + "clipSets": { + "appliesTo": [ + "prims" + ], + "type": "stringlistop" + }, + "clips": { + "appliesTo": [ + "prims" + ], + "type": "dictionary" + }, + "fallbackPrimTypes": { + "appliesTo": [ + "layers" + ], + "type": "dictionary" + } + }, + "Types": { + "UsdAPISchemaBase": { + "alias": { + "UsdSchemaBase": "APISchemaBase" + }, + "autoGenerated": true, + "bases": [ + "UsdSchemaBase" + ], + "schemaKind": "abstractBase" + }, + "UsdClipsAPI": { + "alias": { + "UsdSchemaBase": "ClipsAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "nonAppliedAPI" + }, + "UsdCollectionAPI": { + "alias": { + "UsdSchemaBase": "CollectionAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "multipleApplyAPI" + }, + "UsdModelAPI": { + "alias": { + "UsdSchemaBase": "ModelAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "nonAppliedAPI" + }, + "UsdSchemaBase": { + "alias": { + "UsdSchemaBase": "SchemaBase" + }, + "schemaKind": "abstractBase" + }, + "UsdTyped": { + "alias": { + "UsdSchemaBase": "Typed" + }, + "autoGenerated": true, + "bases": [ + "UsdSchemaBase" + ], + "schemaKind": "abstractBase" + }, + "UsdUsdFileFormat": { + "bases": [ + "SdfFileFormat" + ], + "displayName": "USD File Format", + "extensions": [ + "usd" + ], + "formatId": "usd", + "primary": true, + "target": "usd" + }, + "UsdUsdaFileFormat": { + "bases": [ + "SdfTextFileFormat" + ], + "displayName": "USD Text File Format", + "extensions": [ + "usda" + ], + "formatId": "usda", + "primary": true, + "target": "usd" + }, + "UsdUsdcFileFormat": { + "bases": [ + "SdfFileFormat" + ], + "displayName": "USD Crate File Format", + "extensions": [ + "usdc" + ], + "formatId": "usdc", + "primary": true, + "target": "usd" + }, + "UsdUsdzFileFormat": { + "bases": [ + "SdfFileFormat" + ], + "displayName": "USDZ File Format", + "extensions": [ + "usdz" + ], + "formatId": "usdz", + "primary": true, + "supportsEditing": false, + "supportsWriting": false, + "target": "usd" + }, + "Usd_UsdzResolver": { + "bases": [ + "ArPackageResolver" + ], + "extensions": [ + "usdz" + ] + } + } + }, + "LibraryPath": "", + "Name": "usd", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usd/resources/usd/schema.usda b/blender/lib/usd/usd/resources/usd/schema.usda new file mode 100644 index 0000000..017a42d --- /dev/null +++ b/blender/lib/usd/usd/resources/usd/schema.usda @@ -0,0 +1,378 @@ +#usda 1.0 +( + "This file describes the USD primitive schemata and drives code generation." +) + +def "GLOBAL" ( + customData = { + string libraryName = "usd" + string libraryPath = "pxr/usd/usd" + # string libraryPrefix = "Usd" + # string tokensPrefix = "Usd" + dictionary libraryTokens = { + dictionary apiSchemas = { + string doc = """ + A listop metadata containing the API schemas which have + been applied to this prim, using the Apply() method on + the particular schema class. + """ + } + dictionary fallbackPrimTypes = { + string doc = """ + A dictionary metadata that maps the name of a concrete schema + prim type to an ordered list of schema prim types to use instead + if the schema prim type doesn't exist in version of USD being + used. 
+ """ + } + } + } +) +{ +} + +class "Typed" +( + doc = """The base class for all \\em typed schemas (those that can impart a + typeName to a UsdPrim), and therefore the base class for all + concrete, instantiable "IsA" schemas. + + UsdTyped implements a typeName-based query for its override of + UsdSchemaBase::_IsCompatible(). It provides no other behavior.""" +) +{ +} + +class "APISchemaBase" +( + doc = """The base class for all \\em API schemas. + + An API schema provides an interface to a prim's qualities, but does not + specify a typeName for the underlying prim. The prim's qualities include + its inheritance structure, attributes, relationships etc. Since it cannot + provide a typeName, an API schema is considered to be non-concrete. + + To auto-generate an API schema using usdGenSchema, simply leave the + typeName empty and make it inherit from "/APISchemaBase" or from another + API schema. See UsdModelAPI, UsdClipsAPI and UsdCollectionAPI for examples. + + API schemas are classified into applied and non-applied API schemas. + The author of an API schema has to decide on the type of API schema + at the time of its creation by setting customData['apiSchemaType'] in the + schema definition (i.e. in the associated primSpec inside the schema.usda + file). UsdAPISchemaBase implements methods that are used to record the + application of an API schema on a USD prim. + + If an API schema only provides an interface to set certain core bits of + metadata (like UsdModelAPI, which sets model kind and UsdClipsAPI, which + sets clips-related metadata) OR if the API schema can apply to any type of + prim or only to a known fixed set of prim types OR if there is no use of + recording the application of the API schema, in such cases, it would be + better to make it a non-applied API schema. Examples of non-applied API + schemas include UsdModelAPI, UsdClipsAPI, UsdShadeConnectableAPI and + UsdGeomPrimvarsAPI. + + If there is a need to discover (or record) whether a prim contains or + subscribes to a given API schema, it would be advantageous to make the API + schema be "applied". In general, API schemas that add one or more properties + to a prim should be tagged as applied API schemas. A public Apply() method + is generated for applied API schemas by usdGenSchema. An applied API schema + must be applied to a prim via a call to the generated Apply() method, for + the schema object to evaluate to true when converted to a bool using the + explicit bool conversion operator. Examples of applied API schemas include + UsdCollectionAPI, UsdGeomModelAPI and UsdGeomMotionAPI + + \\anchor UsdAPISchemaBase_SingleVsMultipleApply + \\name Single vs. Multiple Apply API Schemas + + Applied API schemas can further be classified into single-apply and + multiple-apply API schemas. As the name suggests, a single-apply API schema + can only be applied once to a prim. A multiple-apply API schema can be + applied multiple times with different 'instanceName' values. An example of + a multiple-apply API schema is UsdCollectionAPI, where the API schema is + applied to a prim once for every collection owned by the prim. + + \\note An applied API schema can only inherit from another applied API + schema or directly from APISchemaBase. Similarly, a non-applied API schema + can only inherit from a non-applied API Schema or directly from + APISchemaBase. 'usdGenSchema' attempts to issue a warning if it detects + an incompatibility. 
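+
+    For instance, a minimal Python sketch (assuming the pxr Python bindings)
+    of the difference between an applied and a non-applied API schema:
+
+    \\code
+    from pxr import Usd, UsdGeom
+
+    stage = Usd.Stage.CreateInMemory()
+    prim = UsdGeom.Xform.Define(stage, "/Asset").GetPrim()
+
+    # Applied (here, multiple-apply) schema: Apply() records the application
+    # and the schema object then converts to True.
+    render = Usd.CollectionAPI.Apply(prim, "render")
+    assert bool(render)
+
+    # Non-applied schema: constructed directly on the prim, never Apply()'d.
+    Usd.ModelAPI(prim).SetKind("component")
+    \\endcode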
+ + \\note A multiple-apply API schema may not inherit from a single-apply API + schema and vice versa. + + \\note When the bool-conversion operator is invoked on an applied API + schema, it evaluates to true only if the application of the API schema has + been recorded on the prim via a call to the auto-generated Apply() method. + + """ + customData = { + string fileName = "apiSchemaBase" + } +) +{ +} + +class "ModelAPI" +( + inherits = + doc = """UsdModelAPI is an API schema that provides an interface to a prim's + model qualities, if it does, in fact, represent the root prim of a model. + + The first and foremost model quality is its \\em kind, i.e. the metadata + that establishes it as a model (See KindRegistry). UsdModelAPI provides + various methods for setting and querying the prim's kind, as well as + queries (also available on UsdPrim) for asking what category of model + the prim is. See \\ref Usd_ModelKind "Kind and Model-ness". + + UsdModelAPI also provides access to a prim's \\ref Usd_Model_AssetInfo "assetInfo" + data. While any prim \\em can host assetInfo, it is common that published + (referenced) assets are packaged as models, therefore it is convenient + to provide access to the one from the other. + + \\todo establish an _IsCompatible() override that returns IsModel() + \\todo GetModelInstanceName() + """ + customData = { + string apiSchemaType = "nonApplied" + } +) +{ +} + +class "CollectionAPI" +( + inherits = + doc = """ This is a general purpose API schema, used to describe a + collection of heterogeneous objects within the scene. "Objects" here may be + prims or properties belonging to prims or other collections. It's an add-on + schema that can be applied many times to a prim with different collection + names. + + A collection allows an enumeration of a set of paths to include and a + set of paths to exclude. Whether the descendants of an included + path are members of a collection are decided by its expansion rule + (see below). If the collection excludes paths that are not descendents + of included paths, the collection implicitly includes the root path + </>. If such a collection also includes paths that are not + descendants of the excluded paths, it is considered invalid, since + the intention is ambiguous. + + All the properties authored by the schema are namespaced under + "collection:". The given name of the collection provides additional + namespacing for the various per-collection properties, which include the + following: + +
+    - uniform token collection:collectionName:expansionRule - specifies how
+      the paths that are included in the collection must be expanded to
+      determine its members. Possible values include:
+      - explicitOnly - only paths in the includes rel targets and not
+        in the excludes rel targets belong to the collection.
+      - expandPrims - all the prims at or below the includes rel-targets
+        (and not under the excludes rel-targets) belong to the collection.
+        Any property paths included in the collection would, of course, also
+        be honored. This is the default behavior as it satisfies most use
+        cases.
+      - expandPrimsAndProperties - like expandPrims, but also includes all
+        properties on all matched prims. We're still not quite sure what the
+        use cases are for this, but you can use it to capture a whole lot of
+        UsdObjects very concisely.
+    - bool collection:collectionName:includeRoot - boolean attribute
+      indicating whether the pseudo-root path </> should be counted as one
+      of the included target paths. The fallback is false. This separate
+      attribute is required because relationships cannot directly target the
+      root. When expansionRule is explicitOnly, this attribute is ignored.
+    - rel collection:collectionName:includes - specifies a list of targets
+      that are included in the collection. This can target prims or
+      properties directly. A collection can insert the rules of another
+      collection by making its includes relationship target the
+      collection:{collectionName} property on the owning prim of the
+      collection to be included (see UsdCollectionAPI::GetCollectionAttr).
+      It is important to note that including another collection does not
+      guarantee the contents of that collection will be in the final
+      collection; instead, the rules are merged. This means, for example, an
+      exclude entry may exclude a portion of the included collection.
+      When a collection includes one or more collections, the order in which
+      targets are added to the includes relationship may become significant,
+      if there are conflicting opinions about the same path. Targets that
+      are added later are considered to be stronger than earlier targets for
+      the same path.
+    - rel collection:collectionName:excludes - specifies a list of targets
+      that are excluded below the included paths in this collection. This
+      can target prims or properties directly, but cannot target another
+      collection. This is to keep the membership determining logic simple,
+      efficient and easier to reason about. Finally, it is invalid for a
+      collection to exclude paths that are not included in it. The presence
+      of such "orphaned" excluded paths will not affect the set of paths
+      included in the collection, but may affect the performance of querying
+      membership of a path in the collection (see
+      UsdCollectionAPI::MembershipQuery::IsPathIncluded) or of enumerating
+      the objects belonging to the collection (see
+      UsdCollectionAPI::GetIncludedObjects).
+    - uniform opaque collection:collectionName - opaque attribute (meaning
+      it can never have a value) that represents the collection for the
+      purpose of allowing another collection to include it. When this
+      property is targeted by another collection's includes relationship,
+      the rules of this collection will be inserted into the rules of the
+      collection that includes it.
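+
+    As a minimal Python sketch of authoring these properties (the prim paths
+    are illustrative and the pxr Python bindings are assumed):
+
+    \\code
+    from pxr import Usd, UsdGeom
+
+    stage = Usd.Stage.CreateInMemory()
+    world = UsdGeom.Xform.Define(stage, "/World").GetPrim()
+    UsdGeom.Sphere.Define(stage, "/World/Ball")
+    UsdGeom.Sphere.Define(stage, "/World/Debris")
+
+    col = Usd.CollectionAPI.Apply(world, "lighting")
+    col.CreateExpansionRuleAttr(Usd.Tokens.expandPrims)
+    col.CreateIncludesRel().AddTarget("/World")
+    col.CreateExcludesRel().AddTarget("/World/Debris")
+
+    query = col.ComputeMembershipQuery()
+    assert query.IsPathIncluded("/World/Ball")
+    assert not query.IsPathIncluded("/World/Debris")
+    \\endcode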
+ + Implicit inclusion + + In some scenarios it is useful to express a collection that includes + everything except certain paths. To support this, a collection + that has an exclude that is not a descendent of any include + will include the root path </>. + + Creating collections in C++ + + \\snippet examples.cpp ApplyCollections + """ + + customData = { + string extraIncludes = """ +#include "pxr/usd/usd/collectionMembershipQuery.h" +#include "pxr/usd/usd/primFlags.h" +#include "pxr/usd/usd/tokens.h" +""" + token apiSchemaType = "multipleApply" + token propertyNamespacePrefix = "collection" + dictionary schemaTokens = { + dictionary exclude = { + string doc = """ + This is the token used to exclude a path from a collection. + Although it is not a possible value for the "expansionRule" + attribute, it is used as the expansionRule for excluded paths + in UsdCollectionAPI::MembershipQuery::IsPathIncluded. + """ + } + } + } +) +{ + uniform token expansionRule = "expandPrims" ( + allowedTokens = ["explicitOnly", "expandPrims", "expandPrimsAndProperties"] + doc = """Specifies how the paths that are included in + the collection must be expanded to determine its members.""" + ) + uniform bool includeRoot ( + doc = """Boolean attribute indicating whether the pseudo-root + path </> should be counted as one of the included target + paths. The fallback is false. This separate attribute is + required because relationships cannot directly target the root.""" + ) + rel includes ( + doc = """Specifies a list of targets that are included in the collection. + This can target prims or properties directly. A collection can insert + the rules of another collection by making its includes + relationship target the collection:{collectionName} property on + the owning prim of the collection to be included""" + ) + rel excludes ( + doc = """Specifies a list of targets that are excluded below + the included paths in this collection. This can target prims or + properties directly, but cannot target another collection. This is to + keep the membership determining logic simple, efficient and easier to + reason about. Finally, it is invalid for a collection to exclude + paths that are not included in it. The presence of such "orphaned" + excluded paths will not affect the set of paths included in the + collection, but may affect the performance of querying membership of + a path in the collection (see + UsdCollectionAPI::MembershipQuery::IsPathIncluded) + or of enumerating the objects belonging to the collection (see + UsdCollectionAPI::GetIncludedObjects).""" + ) + uniform opaque __INSTANCE_NAME__ ( + customData = { + string apiName = "Collection" + } + doc = """This property represents the collection for the purpose of + allowing another collection to include it. When this property is + targeted by another collection's includes relationship, the rules + of this collection will be inserted into the rules of the collection + that includes it. + """ + ) +} + +class "ClipsAPI" +( + inherits = + doc = """ UsdClipsAPI is an API schema that provides an interface to + a prim's clip metadata. Clips are a "value resolution" feature that + allows one to specify a sequence of usd files (clips) to be consulted, + over time, as a source of varying overrides for the prims at and + beneath this prim in namespace. + + SetClipAssetPaths() establishes the set of clips that can be consulted. 
+ SetClipActive() specifies the ordering of clip application over time + (clips can be repeated), while SetClipTimes() specifies time-mapping + from stage-time to clip-time for the clip active at a given stage-time, + which allows for time-dilation and repetition of clips. + Finally, SetClipPrimPath() determines the path within each clip that will + map to this prim, i.e. the location within the clip at which we will look + for opinions for this prim. + + The clip asset paths, times and active metadata can also be specified + through template clip metadata. This can be desirable when your set of + assets is very large, as the template metadata is much more concise. + SetClipTemplateAssetPath() establishes the asset identifier pattern of the + set of clips to be consulted. SetClipTemplateStride(), + SetClipTemplateEndTime(), and SetClipTemplateStartTime() specify the range + in which USD will search, based on the template. From the set of resolved + asset paths, times and active will be derived internally. + + A prim may have multiple "clip sets" -- named sets of clips that each + have their own values for the metadata described above. For example, + a prim might have a clip set named "Clips_1" that specifies some group + of clip asset paths, and another clip set named "Clips_2" that uses + an entirely different set of clip asset paths. These clip sets are + composed across composition arcs, so clip sets for a prim may be + defined in multiple sublayers or references, for example. Individual + metadata for a given clip set may be sparsely overridden. + + Important facts about clips: + \\li Within the layerstack in which clips are established, the + opinions within the clips will be \\em weaker than any local opinions + in the layerstack, but \em stronger than varying opinions coming across + references and variants. + \\li We will never look for metadata or default opinions in clips + when performing value resolution on the owning stage, since these + quantities must be time-invariant. + + This leads to the common structure in which we reference a model asset + on a prim, and then author clips at the same site: the asset reference + will provide the topology and unvarying data for the model, while the + clips will provide the time-sampled animation. + + For further information, see \\ref Usd_Page_ValueClips + """ + customData = { + token apiSchemaType = "nonApplied" + dictionary schemaTokens = { + dictionary clips = { + string doc = """ + Dictionary that contains the definition of the clip sets on + this prim. See \\ref UsdClipsAPI::GetClips. + """ + } + + dictionary clipSets = { + string doc = """ + ListOp that may be used to affect how opinions from + clip sets are applied during value resolution. + See \\ref UsdClipsAPI::GetClipSets. + """ + } + } + } +) +{ +} diff --git a/blender/lib/usd/usdGeom/resources/generatedSchema.usda b/blender/lib/usd/usdGeom/resources/generatedSchema.usda new file mode 100644 index 0000000..19b8219 --- /dev/null +++ b/blender/lib/usd/usdGeom/resources/generatedSchema.usda @@ -0,0 +1,4269 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class "Imageable" ( + doc = """Base class for all prims that may require rendering or + visualization of some sort. The primary attributes of Imageable + are visibility and purpose, which each provide instructions for + what geometry should be included for processing by rendering and other + computations. 
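+
+    A minimal Python sketch of authoring these two attributes (assuming the
+    pxr Python bindings; the prim path is illustrative):
+
+    \\code
+    from pxr import Usd, UsdGeom
+
+    stage = Usd.Stage.CreateInMemory()
+    ball = UsdGeom.Sphere.Define(stage, "/World/PreviewBall")
+
+    img = UsdGeom.Imageable(ball.GetPrim())
+    img.CreatePurposeAttr(UsdGeom.Tokens.proxy)         # classify as proxy geometry
+    img.CreateVisibilityAttr(UsdGeom.Tokens.invisible)  # prune this subtree
+    img.MakeVisible()                                   # author it visible again
+    \\endcode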
+ + \\deprecated Imageable also provides API for accessing primvars, which + has been moved to the UsdGeomPrimvarsAPI schema, because primvars can now + be applied on non-Imageable prim types. This API is planned + to be removed, UsdGeomPrimvarsAPI should be used directly instead.""" +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class "VisibilityAPI" ( + doc = ''' + UsdGeomVisibilityAPI introduces properties that can be used to author + visibility opinions. + + \\note + Currently, this schema only introduces the attributes that are used to + control purpose visibility. Later, this schema will define _all_ + visibility-related properties and UsdGeomImageable will no longer define + those properties. + + The purpose visibility attributes added by this schema, + _guideVisibility_, _proxyVisibility_, and _renderVisibility_ can each be + used to control visibility for geometry of the corresponding purpose + values, with the overall _visibility_ attribute acting as an + override. I.e., if _visibility_ evaluates to "invisible", purpose + visibility is invisible; otherwise, purpose visibility is determined by + the corresponding purpose visibility attribute. + + Note that the behavior of _guideVisibility_ is subtly different from the + _proxyVisibility_ and _renderVisibility_ attributes, in that "guide" + purpose visibility always evaluates to either "invisible" or "visible", + whereas the other attributes may yield computed values of "inherited" if + there is no authored opinion on the attribute or inherited from an + ancestor. 
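+
+    For example (a minimal sketch, assuming a USD build recent enough to
+    ship UsdGeomVisibilityAPI and its Python bindings):
+
+    \\code
+    from pxr import Usd, UsdGeom
+
+    stage = Usd.Stage.CreateInMemory()
+    rig = UsdGeom.Xform.Define(stage, "/World/Rig")
+
+    vis = UsdGeom.VisibilityAPI.Apply(rig.GetPrim())
+    vis.CreateGuideVisibilityAttr(UsdGeom.Tokens.visible)     # opt guides in
+    vis.CreateRenderVisibilityAttr(UsdGeom.Tokens.invisible)  # hide for final renders
+    \\endcode
+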
This is motivated by the fact that, in Pixar"s user workflows, + we have never found a need to have all guides visible in a scene by + default, whereas we do find that flexibility useful for "proxy" and + "render" geometry. + + This schema can only be applied to UsdGeomImageable prims. The + UseGeomImageable schema provides API for computing the purpose visibility + values that result from the attributes introduced by this schema. + ''' +) +{ + uniform token guideVisibility = "invisible" ( + allowedTokens = ["inherited", "invisible", "visible"] + doc = ''' + This attribute controls visibility for geometry with purpose "guide". + + Unlike overall _visibility_, _guideVisibility_ is uniform, and + therefore cannot be animated. + + Also unlike overall _visibility_, _guideVisibility_ is tri-state, in + that a descendant with an opinion of "visible" overrides an ancestor + opinion of "invisible". + + The _guideVisibility_ attribute works in concert with the overall + _visibility_ attribute: The visibility of a prim with purpose "guide" + is determined by the inherited values it receives for the _visibility_ + and _guideVisibility_ attributes. If _visibility_ evaluates to + "invisible", the prim is invisible. If _visibility_ evaluates to + "inherited" and _guideVisibility_ evaluates to "visible", then the + prim is visible. __Otherwise, it is invisible.__ + ''' + ) + uniform token proxyVisibility = "inherited" ( + allowedTokens = ["inherited", "invisible", "visible"] + doc = ''' + This attribute controls visibility for geometry with purpose "proxy". + + Unlike overall _visibility_, _proxyVisibility_ is uniform, and + therefore cannot be animated. + + Also unlike overall _visibility_, _proxyVisibility_ is tri-state, in + that a descendant with an opinion of "visible" overrides an ancestor + opinion of "invisible". + + The _proxyVisibility_ attribute works in concert with the overall + _visibility_ attribute: The visibility of a prim with purpose "proxy" + is determined by the inherited values it receives for the _visibility_ + and _proxyVisibility_ attributes. If _visibility_ evaluates to + "invisible", the prim is invisible. If _visibility_ evaluates to + "inherited" then: If _proxyVisibility_ evaluates to "visible", then + the prim is visible; if _proxyVisibility_ evaluates to "invisible", + then the prim is invisible; if _proxyVisibility_ evaluates to + "inherited", then the prim may either be visible or invisible, + depending on a fallback value determined by the calling context. + ''' + ) + uniform token renderVisibility = "inherited" ( + allowedTokens = ["inherited", "invisible", "visible"] + doc = ''' + This attribute controls visibility for geometry with purpose + "render". + + Unlike overall _visibility_, _renderVisibility_ is uniform, and + therefore cannot be animated. + + Also unlike overall _visibility_, _renderVisibility_ is tri-state, in + that a descendant with an opinion of "visible" overrides an ancestor + opinion of "invisible". + + The _renderVisibility_ attribute works in concert with the overall + _visibility_ attribute: The visibility of a prim with purpose "render" + is determined by the inherited values it receives for the _visibility_ + and _renderVisibility_ attributes. If _visibility_ evaluates to + "invisible", the prim is invisible. 
If _visibility_ evaluates to + "inherited" then: If _renderVisibility_ evaluates to "visible", then + the prim is visible; if _renderVisibility_ evaluates to "invisible", + then the prim is invisible; if _renderVisibility_ evaluates to + "inherited", then the prim may either be visible or invisible, + depending on a fallback value determined by the calling context. + ''' + ) +} + +class "PrimvarsAPI" ( + doc = """UsdGeomPrimvarsAPI encodes geometric \"primitive variables\", + as UsdGeomPrimvar, which interpolate across a primitive's topology, + can override shader inputs, and inherit down namespace. + + Which Method to Use to Retrieve Primvars + + While creating primvars is unambiguous (CreatePrimvar()), there are quite + a few methods available for retrieving primvars, making it potentially + confusing knowing which one to use. Here are some guidelines: + + - If you are populating a GUI with the primvars already available for + authoring values on a prim, use GetPrimvars(). + - If you want all of the \"useful\" (e.g. to a renderer) primvars + available at a prim, including those inherited from ancestor prims, use + FindPrimvarsWithInheritance(). Note that doing so individually for many + prims will be inefficient. + - To find a particular primvar defined directly on a prim, which may + or may not provide a value, use GetPrimvar(). + - To find a particular primvar defined on a prim or inherited from + ancestors, which may or may not provide a value, use + FindPrimvarWithInheritance(). + - To *efficiently* query for primvars using the overloads of + FindPrimvarWithInheritance() and FindPrimvarsWithInheritance(), one + must first cache the results of FindIncrementallyInheritablePrimvars() for + each non-leaf prim on the stage. """ +) +{ +} + +class "Xformable" ( + doc = """Base class for all transformable prims, which allows arbitrary + sequences of component affine transformations to be encoded. + + \\note + You may find it useful to review while reading + this class description. + + Supported Component Transformation Operations + + UsdGeomXformable currently supports arbitrary sequences of the following + operations, each of which can be encoded in an attribute of the proper + shape in any supported precision: + - translate - 3D + - scale - 3D + - rotateX - 1D angle in degrees + - rotateY - 1D angle in degrees + - rotateZ - 1D angle in degrees + - rotateABC - 3D where ABC can be any combination of the six principle + Euler Angle sets: XYZ, XZY, YXZ, YZX, ZXY, ZYX. See + \"note on rotation packing order\" + - orient - 4D (quaternion) + - transform - 4x4D + + Creating a Component Transformation + + To add components to a UsdGeomXformable prim, simply call AddXformOp() + with the desired op type, as enumerated in \\ref UsdGeomXformOp::Type, + and the desired precision, which is one of \\ref UsdGeomXformOp::Precision. + Optionally, you can also provide an \"op suffix\" for the operator that + disambiguates it from other components of the same type on the same prim. + Application-specific transform schemas can use the suffixes to fill a role + similar to that played by AbcGeom::XformOp's \"Hint\" enums for their own + round-tripping logic. + + We also provide specific \"Add\" API for each type, for clarity and + conciseness, e.g. AddTranslateOp(), AddRotateXYZOp() etc. + + AddXformOp() will return a UsdGeomXformOp object, which is a schema on a + newly created UsdAttribute that provides convenience API for authoring + and computing the component transformations. 
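+
+    In Python (a minimal sketch, assuming the pxr bindings), the
+    Scale-Rotate-Translate stack discussed below could be authored as:
+
+    \\code
+    from pxr import Usd, UsdGeom, Gf
+
+    stage = Usd.Stage.CreateInMemory()
+    xf = UsdGeom.Xform.Define(stage, "/World/Box")
+
+    # Each successive op is "more local": the geometry is scaled first,
+    # then rotated, then translated.
+    xf.AddTranslateOp().Set(Gf.Vec3d(0, 100, 0))
+    xf.AddRotateXYZOp().Set(Gf.Vec3f(30, 60, 90))
+    xf.AddScaleOp().Set(Gf.Vec3f(2, 2, 2))
+    \\endcode
+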
The UsdGeomXformOp can then + be used to author any number of timesamples and default for the op. + + Each successive call to AddXformOp() adds an operator that will be applied + \"more locally\" than the preceding operator, just as if we were pushing + transforms onto a transformation stack - which is precisely what should + happen when the operators are consumed by a reader. + + \\note + If you can, please try to use the UsdGeomXformCommonAPI, which wraps + the UsdGeomXformable with an interface in which Op creation is taken + care of for you, and there is a much higher chance that the data you + author will be importable without flattening into other DCC's, as it + conforms to a fixed set of Scale-Rotate-Translate Ops. + + \\sa \"Using the Authoring API\" + + Data Encoding and Op Ordering + + Because there is no \"fixed schema\" of operations, all of the attributes + that encode transform operations are dynamic, and are scoped in + the namespace \"xformOp\". The second component of an attribute's name provides + the type of operation, as listed above. An \"xformOp\" attribute can + have additional namespace components derived from the opSuffix argument + to the AddXformOp() suite of methods, which provides a preferred way of + naming the ops such that we can have multiple \"translate\" ops with unique + attribute names. For example, in the attribute named + \"xformOp:translate:maya:pivot\", \"translate\" is the type of operation and + \"maya:pivot\" is the suffix. + + The following ordered list of attribute declarations in usda + define a basic Scale-Rotate-Translate with XYZ Euler angles, wherein the + translation is double-precision, and the remainder of the ops are single, + in which we will: + +
+    1. Scale by 2.0 in each dimension
+    2. Rotate about the X, Y, and Z axes by 30, 60, and 90 degrees, respectively
+    3. Translate by 100 units in the Y direction
+ + \\code + float3 xformOp:rotateXYZ = (30, 60, 90) + float3 xformOp:scale = (2, 2, 2) + double3 xformOp:translate = (0, 100, 0) + uniform token[] xformOpOrder = [ \"xformOp:translate\", \"xformOp:rotateXYZ\", \"xformOp:scale\" ] + \\endcode + + The attributes appear in the dictionary order in which USD, by default, + sorts them. To ensure the ops are recovered and evaluated in the correct + order, the schema introduces the **xformOpOrder** attribute, which + contains the names of the op attributes, in the precise sequence in which + they should be pushed onto a transform stack. **Note** that the order is + opposite to what you might expect, given the matrix algebra described in + This also dictates order of op creation, + since each call to AddXformOp() adds a new op to the end of the + \\b xformOpOrder array, as a new \"most-local\" operation. See + \"Example 2 below\" for C++ code that could + have produced this USD. + + If it were important for the prim's rotations to be independently + overridable, we could equivalently (at some performance cost) encode + the transformation also like so: + \\code + float xformOp:rotateX = 30 + float xformOp:rotateY = 60 + float xformOp:rotateZ = 90 + float3 xformOp:scale = (2, 2, 2) + double3 xformOp:translate = (0, 100, 0) + uniform token[] xformOpOrder = [ \"xformOp:translate\", \"xformOp:rotateZ\", \"xformOp:rotateY\", \"xformOp:rotateX\", \"xformOp:scale\" ] + \\endcode + + Again, note that although we are encoding an XYZ rotation, the three + rotations appear in the **xformOpOrder** in the opposite order, with Z, + followed, by Y, followed by X. + + Were we to add a Maya-style scalePivot to the above example, it might + look like the following: + \\code + float3 xformOp:rotateXYZ = (30, 60, 90) + float3 xformOp:scale = (2, 2, 2) + double3 xformOp:translate = (0, 100, 0) + double3 xformOp:translate:scalePivot + uniform token[] xformOpOrder = [ \"xformOp:translate\", \"xformOp:rotateXYZ\", \"xformOp:translate:scalePivot\", \"xformOp:scale\" ] + \\endcode + + Paired \"Inverted\" Ops + + We have been claiming that the ordered list of ops serves as a set + of instructions to a transform stack, but you may have noticed in the last + example that there is a missing operation - the pivot for the scale op + needs to be applied in its inverse-form as a final (most local) op! In the + AbcGeom::Xform schema, we would have encoded an actual \"final\" translation + op whose value was authored by the exporter as the negation of the pivot's + value. However, doing so would be brittle in USD, given that each op can + be independently overridden, and the constraint that one attribute must be + maintained as the negation of the other in order for successful + re-importation of the schema cannot be expressed in USD. + + Our solution leverages the **xformOpOrder** member of the schema, which, + in addition to ordering the ops, may also contain one of two special + tokens that address the paired op and \"stack resetting\" behavior. + + The \"paired op\" behavior is encoded as an \"!invert!\" prefix in + \\b xformOpOrder, as the result of an AddXformOp(isInverseOp=True) call. + The \\b xformOpOrder for the last example would look like: + \\code + uniform token[] xformOpOrder = [ \"xformOp:translate\", \"xformOp:rotateXYZ\", \"xformOp:translate:scalePivot\", \"xformOp:scale\", \"!invert!xformOp:translate:scalePivot\" ] + \\endcode + + When asked for its value via UsdGeomXformOp::GetOpTransform(), an + \"inverted\" Op (i.e. 
the \"inverted\" half of a set of paired Ops) will fetch + the value of its paired attribute and return its negation. This works for + all op types - an error will be issued if a \"transform\" type op is singular + and cannot be inverted. When getting the authored value of an inverted op + via UsdGeomXformOp::Get(), the raw, uninverted value of the associated + attribute is returned. + + For the sake of robustness, setting a value on an inverted op is disallowed. + Attempting to set a value on an inverted op will result in a coding error + and no value being set. + + Resetting the Transform Stack + + The other special op/token that can appear in xformOpOrder is + \"!resetXformStack!\", which, appearing as the first element of + xformOpOrder, indicates this prim should not inherit the transformation + of its namespace parent. See SetResetXformStack() + + Expected Behavior for \"Missing\" Ops + + If an importer expects Scale-Rotate-Translate operations, but a prim + has only translate and rotate ops authored, the importer should assume + an identity scale. This allows us to optimize the data a bit, if only + a few components of a very rich schema (like Maya's) are authored in the + app. + + \\anchor usdGeom_xformableExamples + Using the C++ API + + #1. Creating a simple transform matrix encoding + \\snippet examples.cpp CreateMatrixWithDefault + + #2. Creating the simple SRT from the example above + \\snippet examples.cpp CreateExampleSRT + + #3. Creating a parameterized SRT with pivot using UsdGeomXformCommonAPI + \\snippet examples.cpp CreateSRTWithDefaults + + #4. Creating a rotate-only pivot transform with animated + rotation and translation + \\snippet examples.cpp CreateAnimatedTransform + +""" +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Scope "Scope" ( + doc = """Scope is the simplest grouping primitive, and does not carry the + baggage of transformability. Note that transforms should inherit down + through a Scope successfully - it is just a guaranteed no-op from a + transformability perspective.""" +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class Xform "Xform" ( + doc = "Concrete prim schema for a transform, which implements Xformable " +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "Boundable" ( + doc = """Boundable introduces the ability for a prim to persistently + cache a rectilinear, local-space, extent. + + Why Extent and not Bounds ? + Boundable introduces the notion of \"extent\", which is a cached computation + of a prim's local-space 3D range for its resolved attributes at the + layer and time in which extent is authored. We have found that with + composed scene description, attempting to cache pre-computed bounds at + interior prims in a scene graph is very fragile, given the ease with which + one can author a single attribute in a stronger layer that can invalidate + many authored caches - or with which a re-published, referenced asset can + do the same. + + Therefore, we limit to precomputing (generally) leaf-prim extent, which + avoids the need to read in large point arrays to compute bounds, and + provides UsdGeomBBoxCache the means to efficiently compute and + (session-only) cache intermediate bounds. You are free to compute and + author intermediate bounds into your scenes, of course, which may work + well if you have sufficient locks on your pipeline to guarantee that once + authored, the geometry and transforms upon which they are based will + remain unchanged, or if accuracy of the bounds is not an ironclad + requisite. 
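+
+    As a minimal Python sketch (assuming the pxr bindings), a leaf gprim's
+    extent can be authored alongside the geometry-affecting attribute it
+    depends on:
+
+    \\code
+    from pxr import Usd, UsdGeom
+
+    stage = Usd.Stage.CreateInMemory()
+    ball = UsdGeom.Sphere.Define(stage, "/World/Ball")
+    ball.GetRadiusAttr().Set(2.0)
+
+    # Cache the matching local-space extent so consumers such as
+    # UsdGeomBBoxCache never need to load the geometry to get bounds.
+    ball.CreateExtentAttr([(-2.0, -2.0, -2.0), (2.0, 2.0, 2.0)])
+    \\endcode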
+ + When intermediate bounds are authored on Boundable parents, the child prims + will be pruned from BBox computation; the authored extent is expected to + incorporate all child bounds.""" +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. 
+ It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "Gprim" ( + doc = '''Base class for all geometric primitives. + + Gprim encodes basic graphical properties such as doubleSided and + orientation, and provides primvars for "display color" and "display + opacity" that travel with geometry to be used as shader overrides. ''' +) +{ + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Cube "Cube" ( + doc = """Defines a primitive rectilinear cube centered at the origin. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" +) +{ + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. 
By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent = [(-1, -1, -1), (1, 1, 1)] ( + doc = """Extent is re-defined on Cube only to provide a fallback value. + \\sa UsdGeomGprim::GetExtentAttr().""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double size = 2 ( + doc = """Indicates the length of each edge of the cube. If you + author size you must also author extent. + + \\sa GetExtentAttr()""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Sphere "Sphere" ( + doc = """Defines a primitive sphere centered at the origin. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" +) +{ + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent = [(-1, -1, -1), (1, 1, 1)] ( + doc = """Extent is re-defined on Sphere only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double radius = 1 ( + doc = """Indicates the sphere's radius. If you + author radius you must also author extent. + + \\sa GetExtentAttr()""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Cylinder "Cylinder" ( + doc = """Defines a primitive cylinder with closed ends, centered at the + origin, whose spine is along the specified axis. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" +) +{ + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = "The axis along which the spine of the cylinder is aligned" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. 
Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent = [(-1, -1, -1), (1, 1, 1)] ( + doc = """Extent is re-defined on Cylinder only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) + double height = 2 ( + doc = """The size of the cylinder's spine along the specified + axis. If you author height you must also author extent. + + \\sa GetExtentAttr()""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double radius = 1 ( + doc = """The radius of the cylinder. If you author radius + you must also author extent. + + \\sa GetExtentAttr()""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Capsule "Capsule" ( + doc = """Defines a primitive capsule, i.e. a cylinder capped by two half + spheres, centered at the origin, whose spine is along the specified + axis.""" +) +{ + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = "The axis along which the spine of the capsule is aligned" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent = [(-0.5, -0.5, -1), (0.5, 0.5, 1)] ( + doc = """Extent is re-defined on Capsule only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) + double height = 1 ( + doc = """The size of the capsule's spine along the specified + axis excluding the size of the two half spheres, i.e. + the size of the cylinder portion of the capsule. + If you author height you must also author extent. + \\sa GetExtentAttr()""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double radius = 0.5 ( + doc = """The radius of the capsule. If you + author radius you must also author extent. + + \\sa GetExtentAttr()""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Cone "Cone" ( + doc = """Defines a primitive cone, centered at the origin, whose spine + is along the specified axis, with the apex of the cone pointing + in the direction of the positive axis. 
+ + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" +) +{ + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = "The axis along which the spine of the cone is aligned" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent = [(-1, -1, -1), (1, 1, 1)] ( + doc = """Extent is re-defined on Cone only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) + double height = 2 ( + doc = """The size of the cone's spine along the specified + axis. If you author height you must also author extent. + + \\sa GetExtentAttr()""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. 
+ + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double radius = 1 ( + doc = """The radius of the cone. If you + author radius you must also author extent. + + \\sa GetExtentAttr()""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Plane "Plane" ( + doc = """Defines a primitive plane, centered at the origin, and is defined by + a cardinal axis, width, and length. The plane is double-sided by default. + + The axis of width and length are perpendicular to the plane's axis: + + axis | width | length + ----- | ------ | ------- + X | z-axis | y-axis + Y | x-axis | z-axis + Z | x-axis | y-axis + + """ +) +{ + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = """The axis along which the surface of the plane is aligned. When set + to 'Z' the plane is in the xy-plane; when axis is 'X' the plane is in + the yz-plane, and when axis is 'Y' the plane is in the xz-plane. + + \\sa UsdGeomGprim::GetAxisAttr().""" + ) + uniform bool doubleSided = 1 ( + doc = """Planes are double-sided by default. Clients may also support + single-sided planes. + + \\sa UsdGeomGprim::GetDoubleSidedAttr()""" + ) + float3[] extent = [(-1, -1, 0), (1, 1, 0)] ( + doc = """Extent is re-defined on Plane only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) + double length = 2 ( + doc = """The length of the plane, which aligns to the y-axis when axis is + 'Z' or 'X', or to the z-axis when axis is 'Y'. If you author length + you must also author extent. + + \\sa UsdGeomGprim::GetExtentAttr()""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + double width = 2 ( + doc = """The width of the plane, which aligns to the x-axis when axis is + 'Z' or 'Y', or to the z-axis when axis is 'X'. If you author width + you must also author extent. + + \\sa UsdGeomGprim::GetExtentAttr()""" + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "PointBased" ( + doc = """Base class for all UsdGeomGprims that possess points, + providing common attributes such as normals and velocities.""" +) +{ + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. 
+ To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. 
+ It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Mesh "Mesh" ( + doc = """Encodes a mesh with optional subdivision properties and features. + + As a point-based primitive, meshes are defined in terms of points that + are connected into edges and faces. Many references to meshes use the + term 'vertex' in place of or interchangeably with 'points', while some + use 'vertex' to refer to the 'face-vertices' that define a face. To + avoid confusion, the term 'vertex' is intentionally avoided in favor of + 'points' or 'face-vertices'. + + The connectivity between points, edges and faces is encoded using a + common minimal topological description of the faces of the mesh. Each + face is defined by a set of face-vertices using indices into the Mesh's + _points_ array (inherited from UsdGeomPointBased) and laid out in a + single linear _faceVertexIndices_ array for efficiency. A companion + _faceVertexCounts_ array provides, for each face, the number of + consecutive face-vertices in _faceVertexIndices_ that define the face. + No additional connectivity information is required or constructed, so + no adjacency or neighborhood queries are available. + + A key property of this mesh schema is that it encodes both subdivision + surfaces and simpler polygonal meshes. This is achieved by varying the + _subdivisionScheme_ attribute, which is set to specify Catmull-Clark + subdivision by default, so polygonal meshes must always be explicitly + declared. The available subdivision schemes and additional subdivision + features encoded in optional attributes conform to the feature set of + OpenSubdiv + (https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html). + + \\anchor UsdGeom_Mesh_Primvars + __A Note About Primvars__ + + The following list clarifies the number of elements for and the + interpolation behavior of the different primvar interpolation types + for meshes: + + - __constant__: One element for the entire mesh; no interpolation. + - __uniform__: One element for each face of the mesh; elements are + typically not interpolated but are inherited by other faces derived + from a given face (via subdivision, tessellation, etc.). + - __varying__: One element for each point of the mesh; + interpolation of point data is always linear. + - __vertex__: One element for each point of the mesh; + interpolation of point data is applied according to the + _subdivisionScheme_ attribute. + - __faceVarying__: One element for each of the face-vertices that + define the mesh topology; interpolation of face-vertex data may + be smooth or linear, according to the _subdivisionScheme_ and + _faceVaryingLinearInterpolation_ attributes. + + Primvar interpolation types and related utilities are described more + generally in \\ref Usd_InterpolationVals. + + \\anchor UsdGeom_Mesh_Normals + __A Note About Normals__ + + Normals should not be authored on a subdivision mesh, since subdivision + algorithms define their own normals. They should only be authored for + polygonal meshes (_subdivisionScheme_ = \"none\"). + + The _normals_ attribute inherited from UsdGeomPointBased is not a generic + primvar, but the number of elements in this attribute will be determined by + its _interpolation_. See . + If _normals_ and _primvars:normals_ are both specified, the latter has + precedence. 
If a polygonal mesh specifies __neither__ _normals_ nor + _primvars:normals_, then it should be treated and rendered as faceted, + with no attempt to compute smooth normals. + + The normals generated for smooth subdivision schemes, e.g. Catmull-Clark + and Loop, will likewise be smooth, but others, e.g. Bilinear, may be + discontinuous between faces and/or within non-planar irregular faces.""" +) +{ + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + int[] cornerIndices = [] ( + doc = """The indices of points for which a corresponding sharpness + value is specified in _cornerSharpnesses_ (so the size of this array + must match that of _cornerSharpnesses_).""" + ) + float[] cornerSharpnesses = [] ( + doc = """The sharpness values associated with a corresponding set of + points specified in _cornerIndices_ (so the size of this array must + match that of _cornerIndices_). Use the constant `SHARPNESS_INFINITE` + for a perfectly sharp corner.""" + ) + int[] creaseIndices = [] ( + doc = """The indices of points grouped into sets of successive pairs + that identify edges to be creased. The size of this array must be + equal to the sum of all elements of the _creaseLengths_ attribute.""" + ) + int[] creaseLengths = [] ( + doc = """The length of this array specifies the number of creases + (sets of adjacent sharpened edges) on the mesh. Each element gives + the number of points of each crease, whose indices are successively + laid out in the _creaseIndices_ attribute. Since each crease must + be at least one edge long, each element of this array must be at + least two.""" + ) + float[] creaseSharpnesses = [] ( + doc = """The per-crease or per-edge sharpness values for all creases. + Since _creaseLengths_ encodes the number of points in each crease, + the number of elements in this array will be either len(creaseLengths) + or the sum over all X of (creaseLengths[X] - 1). Note that while + the RI spec allows each crease to have either a single sharpness + or a value per-edge, USD will encode either a single sharpness + per crease on a mesh, or sharpnesses for all edges making up + the creases on a mesh. Use the constant `SHARPNESS_INFINITE` for a + perfectly sharp crease.""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. 
Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + token faceVaryingLinearInterpolation = "cornersPlus1" ( + allowedTokens = ["none", "cornersOnly", "cornersPlus1", "cornersPlus2", "boundaries", "all"] + doc = '''Specifies how elements of a primvar of interpolation type + "faceVarying" are interpolated for subdivision surfaces. Interpolation + can be as smooth as a "vertex" primvar or constrained to be linear at + features specified by several options. Valid values correspond to + choices available in OpenSubdiv: + + - __none__: No linear constraints or sharpening, smooth everywhere + - __cornersOnly__: Sharpen corners of discontinuous boundaries only, + smooth everywhere else + - __cornersPlus1__: The default, same as "cornersOnly" plus additional + sharpening at points where three or more distinct face-varying + values occur + - __cornersPlus2__: Same as "cornersPlus1" plus additional sharpening + at points with at least one discontinuous boundary corner or + only one discontinuous boundary edge (a dart) + - __boundaries__: Piecewise linear along discontinuous boundaries, + smooth interior + - __all__: Piecewise linear everywhere + + These are illustrated and described in more detail in the OpenSubdiv + documentation: + https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html#face-varying-interpolation-rules''' + ) + int[] faceVertexCounts ( + doc = """Provides the number of vertices in each face of the mesh, + which is also the number of consecutive indices in _faceVertexIndices_ + that define the face. The length of this attribute is the number of + faces in the mesh. If this attribute has more than + one timeSample, the mesh is considered to be topologically varying.""" + ) + int[] faceVertexIndices ( + doc = """Flat list of the index (into the _points_ attribute) of each + vertex of each face in the mesh. If this attribute has more than + one timeSample, the mesh is considered to be topologically varying.""" + ) + int[] holeIndices = [] ( + doc = """The indices of all faces that should be treated as holes, + i.e. made invisible. 
This is traditionally a feature of subdivision + surfaces and not generally applied to polygonal meshes.""" + ) + token interpolateBoundary = "edgeAndCorner" ( + allowedTokens = ["none", "edgeOnly", "edgeAndCorner"] + doc = '''Specifies how subdivision is applied for faces adjacent to + boundary edges and boundary points. Valid values correspond to choices + available in OpenSubdiv: + + - __none__: No boundary interpolation is applied and boundary faces are + effectively treated as holes + - __edgeOnly__: A sequence of boundary edges defines a smooth curve to + which the edges of subdivided boundary faces converge + - __edgeAndCorner__: The default, similar to "edgeOnly" but the smooth + boundary curve is made sharp at corner points + + These are illustrated and described in more detail in the OpenSubdiv + documentation: + https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html#boundary-interpolation-rules''' + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. 
+ + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + uniform token subdivisionScheme = "catmullClark" ( + allowedTokens = ["catmullClark", "loop", "bilinear", "none"] + doc = '''The subdivision scheme to be applied to the surface. + Valid values are: + + - __catmullClark__: The default, Catmull-Clark subdivision; preferred + for quad-dominant meshes (generalizes B-splines); interpolation + of point data is smooth (non-linear) + - __loop__: Loop subdivision; preferred for purely triangular meshes; + interpolation of point data is smooth (non-linear) + - __bilinear__: Subdivision reduces all faces to quads (topologically + similar to "catmullClark"); interpolation of point data is bilinear + - __none__: No subdivision, i.e. a simple polygonal mesh; interpolation + of point data is linear + + Polygonal meshes are typically lighter weight and faster to render, + depending on renderer and render mode. Use of "bilinear" will produce + a similar shape to a polygonal mesh and may offer additional guarantees + of watertightness and additional subdivision features (e.g. holes) but + may also not respect authored normals.''' + ) + token triangleSubdivisionRule = "catmullClark" ( + allowedTokens = ["catmullClark", "smooth"] + doc = '''Specifies an option to the subdivision rules for the + Catmull-Clark scheme to try and improve undesirable artifacts when + subdividing triangles. Valid values are "catmullClark" for the + standard rules (the default) and "smooth" for the improvement. + + See https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html#triangle-subdivision-rule''' + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. 
+ + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class GeomSubset "GeomSubset" ( + doc = """Encodes a subset of a piece of geometry (i.e. a UsdGeomImageable) + as a set of indices. Currently only supports encoding of face-subsets, but + could be extended in the future to support subsets representing edges, + segments, points etc. + + To apply to a geometric prim, a GeomSubset prim must be the prim's direct + child in namespace, and possess a concrete defining specifier (i.e. def). + This restriction makes it easy and efficient to discover subsets of a prim. + We might want to relax this restriction if it's common to have multiple + families of subsets on a gprim and if it's useful to be able to + organize subsets belonging to a family under a common scope. See + 'familyName' attribute for more info on defining a family of subsets. + + Note that a GeomSubset isn't an imageable (i.e. doesn't derive from + UsdGeomImageable). So, you can't author visibility for it or + override its purpose. + + Materials are bound to GeomSubsets just as they are for regular + geometry using API available in UsdShade (UsdShadeMaterial::Bind). +""" +) +{ + uniform token elementType = "face" ( + allowedTokens = ["face"] + doc = '''The type of element that the indices target. Currently only + allows "face" and defaults to it.''' + ) + uniform token familyName = "" ( + doc = '''The name of the family of subsets that this subset belongs to. + This is optional and is primarily useful when there are multiple + families of subsets under a geometric prim. In some cases, this could + also be used for achieving proper roundtripping of subset data between + DCC apps. + When multiple subsets belonging to a prim have the same familyName, they + are said to belong to the family. A familyType value can be + encoded on the owner of a family of subsets as a token using the static + method UsdGeomSubset::SetFamilyType(). "familyType" can have one of the + following values: +
- __UsdGeomTokens->partition__: implies that every element of
+ the whole geometry appears exactly once in only one of the subsets
+ belonging to the family.
+ - __UsdGeomTokens->nonOverlapping__: an element that appears in one
+ subset may not appear in any other subset belonging to the family.
+ - __UsdGeomTokens->unrestricted__: implies that there are no
+ restrictions w.r.t. the membership of elements in the subsets. They
+ could be overlapping and the union of all subsets in the family may
+ not represent the whole.
+
+ \\note The validity of subset data is not enforced by the authoring
+ APIs; however, they can be checked using UsdGeomSubset::ValidateFamily().
+ '''
+ )
+ int[] indices = [] (
+ doc = """The set of indices included in this subset. The indices need not
+ be sorted, but the same index should not appear more than once."""
+ )
+}
+
+class NurbsPatch "NurbsPatch" (
+ doc = """Encodes a rational or polynomial non-uniform B-spline
+ surface, with optional trim curves.
+
+ The encoding mostly follows that of RiNuPatch and RiTrimCurve:
+ https://renderman.pixar.com/resources/current/RenderMan/geometricPrimitives.html#rinupatch , with some minor renaming and coalescing for clarity.
+
+ The layout of control vertices in the points attribute inherited
+ from UsdGeomPointBased is row-major with U considered rows, and V columns.
+
+ \\anchor UsdGeom_NurbsPatch_Form
+ NurbsPatch Form
+
+ The authored points, orders, knots, weights, and ranges are all that is
+ required to render the nurbs patch. However, the only way to model closed
+ surfaces with nurbs is to ensure that the first and last control points
+ along the given axis are coincident. Similarly, to ensure the surface is
+ not only closed but also C2 continuous, the last order - 1 control
+ points must be (correspondingly) coincident with the first order - 1
+ control points, and also the spacing of the last corresponding knots
+ must be the same as the first corresponding knots.
+
+ Form is provided as an aid to interchange between modeling and
+ animation applications so that they can robustly identify the intent with
+ which the surface was modelled, and take measures (if they are able) to
+ preserve the continuity/coincidence constraints as the surface may be rigged
+ or deformed.
+ - An open-form NurbsPatch has no continuity constraints.
+ - A closed-form NurbsPatch expects the first and last control points
+ to overlap.
+ - A periodic-form NurbsPatch expects the first and last
+ order - 1 control points to overlap.
+
+ Nurbs vs Subdivision Surfaces
+
+ Nurbs are an important modeling primitive in CAD/CAM tools and early
+ computer graphics DCC's. Because they have a natural UV parameterization
+ they easily support \"trim curves\", which allow smooth shapes to be
+ carved out of the surface.
+
+ However, the topology of the patch is always rectangular, and joining two
+ nurbs patches together (especially when they have differing numbers of
+ spans) is difficult to do smoothly. Also, nurbs are not supported by
+ the Ptex texturing technology (http://ptex.us).
+
+ Neither of these limitations is shared by subdivision surfaces; therefore,
+ although they do not subscribe to trim-curve-based shaping, subdivs are
+ often considered a more flexible modeling primitive.
+ """
+)
+{
+ vector3f[] accelerations (
+ doc = """If provided, 'accelerations' should be used with
+ velocities to compute positions between samples for the 'points'
+ attribute rather than interpolating between neighboring 'points'
+ samples. Acceleration is measured in position units per second-squared.
+ To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + double[] pointWeights ( + doc = """Optionally provides \"w\" components for each control point, + thus must be the same length as the points attribute. If authored, + the patch will be rational. If unauthored, the patch will be + polynomial, i.e. weight for all points is 1.0. 
+ \\note Some DCC's pre-weight the points, but in this schema,
+ points are not pre-weighted."""
+ )
+ color3f[] primvars:displayColor (
+ doc = '''It is useful to have an "official" colorSet that can be used
+ as a display or modeling color, even in the absence of any specified
+ shader for a gprim. DisplayColor serves this role; because it is a
+ UsdGeomPrimvar, it can also be used as a gprim override for any shader
+ that consumes a displayColor parameter.'''
+ )
+ float[] primvars:displayOpacity (
+ doc = """Companion to displayColor that specifies opacity, broken
+ out as an independent attribute rather than an rgba color, both so that
+ each can be independently overridden, and because shaders rarely consume
+ rgba parameters."""
+ )
+ rel proxyPrim (
+ doc = '''The proxyPrim relationship allows us to link a
+ prim whose purpose is "render" to its (single target)
+ purpose="proxy" prim. This is entirely optional, but can be
+ useful in several scenarios:
+
+ - In a pipeline that does pruning (for complexity management)
+ by deactivating prims composed from asset references, when we
+ deactivate a purpose="render" prim, we will be able to discover
+ and additionally deactivate its associated purpose="proxy" prim,
+ so that preview renders reflect the pruning accurately.
+
+ - DCC importers may be able to make more aggressive optimizations
+ for interactive processing and display if they can discover the proxy
+ for a given render prim.
+
+ - With a little more work, a Hydra-based application will be able
+ to map a picked proxy prim back to its render geometry for selection.
+
+ \\note It is only valid to author the proxyPrim relationship on
+ prims whose purpose is "render".'''
+ )
+ uniform token purpose = "default" (
+ allowedTokens = ["default", "render", "proxy", "guide"]
+ doc = """Purpose is a classification of geometry into categories that
+ can each be independently included or excluded from traversals of prims
+ on a stage, such as rendering or bounding-box computation traversals.
+
+ See for more detail about how
+ purpose is computed and used."""
+ )
+ int[] trimCurve:counts (
+ doc = '''Each element specifies how many curves are present in each
+ "loop" of the trimCurve, and the length of the array determines how
+ many loops the trimCurve contains. The sum of all elements is the
+ total number of curves in the trim, to which we will refer as
+ nCurves in describing the other trim attributes.'''
+ )
+ double[] trimCurve:knots (
+ doc = """Flat list of parametric values for each of the
+ nCurves curves. There will be as many knots as the sum over
+ all elements of vertexCounts plus the sum over all elements of
+ orders."""
+ )
+ int[] trimCurve:orders (
+ doc = "Flat list of orders for each of the nCurves curves."
+ )
+ double3[] trimCurve:points (
+ doc = """Flat list of homogeneous 2D points (u, v, w) that comprise
+ the nCurves curves. The number of points should be equal to the
+ sum over all elements of vertexCounts."""
+ )
+ double2[] trimCurve:ranges (
+ doc = """Flat list of minimum and maximum parametric values
+ (as defined by knots) for each of the nCurves curves."""
+ )
+ int[] trimCurve:vertexCounts (
+ doc = """Flat list of number of vertices for each of the
+ nCurves curves."""
+ )
+ uniform token uForm = "open" (
+ allowedTokens = ["open", "closed", "periodic"]
+ doc = '''Interpret the control grid and knot vectors as representing
+ an open, geometrically closed, or geometrically closed and C2 continuous
+ surface along the U dimension.
+ \\sa "NurbsPatch Form" '''
+ )
+ double[] uKnots (
+ doc = """Knot vector for U direction providing U parameterization.
+ The length of this array must be ( uVertexCount + uOrder ), and its
+ entries must take on monotonically increasing values."""
+ )
+ int uOrder (
+ doc = """Order in the U direction. Order must be positive and is
+ equal to the degree of the polynomial basis to be evaluated, plus 1."""
+ )
+ double2 uRange (
+ doc = """Provides the minimum and maximum parametric values (as defined
+ by uKnots) over which the surface is actually defined. The minimum
+ must be less than the maximum, and greater than or equal to the
+ value of uKnots[uOrder-1]. The maximum must be less than or equal
+ to the last element's value in uKnots."""
+ )
+ int uVertexCount (
+ doc = """Number of vertices in the U direction. Should be at least as
+ large as uOrder."""
+ )
+ vector3f[] velocities (
+ doc = """If provided, 'velocities' should be used by renderers to
+
+ compute positions between samples for the 'points' attribute, rather
+ than interpolating between neighboring 'points' samples. This is the
+ only reasonable means of computing motion blur for topologically
+ varying PointBased primitives. It follows that the length of each
+ 'velocities' sample must match the length of the corresponding
+ 'points' sample. Velocity is measured in position units per second,
+ as per most simulation software. To convert to position units per
+ UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond().
+
+ See also ."""
+ )
+ uniform token vForm = "open" (
+ allowedTokens = ["open", "closed", "periodic"]
+ doc = '''Interpret the control grid and knot vectors as representing
+ an open, geometrically closed, or geometrically closed and C2 continuous
+ surface along the V dimension.
+ \\sa "NurbsPatch Form" '''
+ )
+ token visibility = "inherited" (
+ allowedTokens = ["inherited", "invisible"]
+ doc = '''Visibility is meant to be the simplest form of "pruning"
+ visibility that is supported by most DCC apps. Visibility is
+ animatable, allowing a sub-tree of geometry to be present for some
+ segment of a shot, and absent from others; unlike the action of
+ deactivating geometry prims, invisible geometry is still
+ available for inspection, for positioning, for defining volumes, etc.'''
+ )
+ double[] vKnots (
+ doc = """Knot vector for V direction providing V parameterization.
+ The length of this array must be ( vVertexCount + vOrder ), and its
+ entries must take on monotonically increasing values."""
+ )
+ int vOrder (
+ doc = """Order in the V direction. Order must be positive and is
+ equal to the degree of the polynomial basis to be evaluated, plus 1."""
+ )
+ double2 vRange (
+ doc = """Provides the minimum and maximum parametric values (as defined
+ by vKnots) over which the surface is actually defined. The minimum
+ must be less than the maximum, and greater than or equal to the
+ value of vKnots[vOrder-1]. The maximum must be less than or equal
+ to the last element's value in vKnots."""
+ )
+ int vVertexCount (
+ doc = """Number of vertices in the V direction. Should be at least as
+ large as vOrder."""
+ )
+ uniform token[] xformOpOrder (
+ doc = """Encodes the sequence of transformation operations in the
+ order in which they should be pushed onto a transform stack while
+ visiting a UsdStage's prims in a graph traversal that will effect
+ the desired positioning for this prim and its descendant prims.
+
+ You should rarely, if ever, need to manipulate this attribute directly.
+ It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "Curves" ( + doc = """Base class for UsdGeomBasisCurves, UsdGeomNurbsCurves, and + UsdGeomHermiteCurves. The BasisCurves schema is designed to be + analagous to offline renderers' notion of batched curves (such as + the classical RIB definition via Basis and Curves statements), + while the NurbsCurve schema is designed to be analgous to the + NURBS curves found in packages like Maya and Houdini while + retaining their consistency with the RenderMan specification for + NURBS Patches. HermiteCurves are useful for the + interchange of animation guides and paths. + + It is safe to use the length of the curve vertex count to derive + the number of curves and the number and layout of curve vertices, + but this schema should NOT be used to derive the number of curve + points. While vertex indices are implicit in all shipped + descendent types of this schema, one should not assume that all + internal or future shipped schemas will follow this pattern. Be + sure to key any indexing behavior off the concrete type, not this + abstract type. + """ +) +{ + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + int[] curveVertexCounts ( + doc = """Curves-derived primitives can represent multiple distinct, + potentially disconnected curves. The length of 'curveVertexCounts' + gives the number of such curves, and each element describes the + number of vertices in the corresponding curve""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). 
If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. 
+ + See for more detail about how + purpose is computed and used.""" + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + float[] widths ( + doc = """Provides width specification for the curves, whose application + will depend on whether the curve is oriented (normals are defined for + it), in which case widths are \"ribbon width\", or unoriented, in which + case widths are cylinder width. 'widths' is not a generic Primvar, + but the number of elements in this attribute will be determined by + its 'interpolation'. See . If 'widths' + and 'primvars:widths' are both specified, the latter has precedence.""" + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class BasisCurves "BasisCurves" ( + doc = """BasisCurves are a batched curve representation analogous to the + classic RIB definition via Basis and Curves statements. BasisCurves are + often used to render dense aggregate geometry like hair or grass. + + A 'matrix' and 'vstep' associated with the basis are used to + interpolate the vertices of a cubic BasisCurves. (The basis attribute + is unused for linear BasisCurves.) + + A single prim may have many curves whose count is determined implicitly by + the length of the curveVertexCounts vector. Each individual curve is + composed of one or more segments. Each segment is defined by four vertices + for cubic curves and two vertices for linear curves. See the next section + for more information on how to map curve vertex counts to segment counts. + + Segment Indexing + Interpolating a curve requires knowing how to decompose it into its + individual segments. + + The segments of a cubic curve are determined by the vertex count, + the wrap (periodicity), and the vstep of the basis. For linear + curves, the basis token is ignored and only the vertex count and + wrap are needed. 
+
+ cubic basis | vstep
+ ------------- | ------
+ bezier | 3
+ catmullRom | 1
+ bspline | 1
+
+ The first segment of a cubic (nonperiodic) curve is always defined by its
+ first four points. The vstep is the increment used to determine what
+ vertex indices define the next segment. For a two segment (nonperiodic)
+ bspline basis curve (vstep = 1), the first segment will be defined by
+ interpolating vertices [0, 1, 2, 3] and the second segment will be defined
+ by [1, 2, 3, 4]. For a two segment bezier basis curve (vstep = 3), the
+ first segment will be defined by interpolating vertices [0, 1, 2, 3] and
+ the second segment will be defined by [3, 4, 5, 6]. If the vstep is not
+ one, then you must take special care to make sure that the number of cvs
+ properly divides by your vstep. (The indices described are relative to
+ the initial vertex index for a batched curve.)
+
+ For periodic curves, at least one of the curve's initial vertices is
+ repeated to close the curve. For cubic curves, the number of vertices
+ repeated is '4 - vstep'. For linear curves, only one vertex is repeated
+ to close the loop.
+
+ Pinned curves are a special case of nonperiodic curves that only affects
+ the behavior of cubic Bspline and Catmull-Rom curves. To evaluate or render
+ pinned curves, a client must effectively add 'phantom points' at the
+ beginning and end of every curve in a batch. These phantom points
+ are injected to ensure that the interpolated curve begins at P[0] and
+ ends at P[n-1].
+
+ For a curve with initial point P[0] and last point P[n-1], the phantom
+ points are defined as:
+ P[-1] = 2 * P[0] - P[1]
+ P[n] = 2 * P[n-1] - P[n-2]
+
+ Pinned cubic curves will (usually) have to be unpacked into the standard
+ nonperiodic representation before rendering. This unpacking can add some
+ additional overhead. However, using pinned curves reduces the amount of
+ data recorded in a scene and (more importantly) better records the
+ authors' intent for interchange.
+
+ \\note The additional phantom points mean that the minimum curve vertex
+ count for cubic bspline and catmullRom curves is 2.
+
+ Linear curve segments are defined by two vertices.
+ A two segment linear curve's first segment would be defined by
+ interpolating vertices [0, 1]. The second segment would be defined by
+ vertices [1, 2]. (Again, for a batched curve, indices are relative to
+ the initial vertex index.)
+
+ When validating curve topology, each renderable entry in the
+ curveVertexCounts vector must pass this check.
+
+ type | wrap | validity
+ ------- | --------------------------- | ----------------
+ linear | nonperiodic | curveVertexCounts[i] > 2
+ linear | periodic | curveVertexCounts[i] > 3
+ cubic | nonperiodic | (curveVertexCounts[i] - 4) % vstep == 0
+ cubic | periodic | (curveVertexCounts[i]) % vstep == 0
+ cubic | pinned (catmullRom/bspline) | (curveVertexCounts[i] - 2) >= 0
+
+ Cubic Vertex Interpolation
+
+ \\image html USDCurveBasisMatrix.png width=750
+
+ Linear Vertex Interpolation
+
+ Linear interpolation is always used on curves of type linear.
+ For 't' with domain [0, 1], the curve is defined by the equation
+ P0 * (1-t) + P1 * t. t at 0 describes the first point and t at 1 describes
+ the end point.
+
+ Primvar Interpolation
+
+ For cubic curves, primvar data can be either interpolated cubically between
+ vertices or linearly across segments. The corresponding token
+ for cubic interpolation is 'vertex' and for linear interpolation is
+ 'varying'. Per vertex data should be the same size as the number
+ of vertices in your curve. Segment varying data is dependent on the
+ wrap (periodicity) and number of segments in your curve. For linear curves,
+ varying and vertex data would be interpolated the same way. By convention
+ varying is the preferred interpolation because of the association of
+ varying with linear interpolation.
+
+ \\image html USDCurvePrimvars.png
+
+ To convert an entry in the curveVertexCounts vector into a segment count
+ for an individual curve, apply these rules. Sum up all the results in
+ order to compute how many total segments all curves have.
+
+ The following tables describe the expected segment count for the 'i'th
+ curve in a curve batch as well as the entire batch. Python syntax
+ like '[:]' (to describe all members of an array) and 'len(...)'
+ (to describe the length of an array) are used.
+
+ type | wrap | curve segment count | batch segment count
+ ------- | --------------------------- | -------------------------------------- | --------------------------
+ linear | nonperiodic | curveVertexCounts[i] - 1 | sum(curveVertexCounts[:]) - len(curveVertexCounts)
+ linear | periodic | curveVertexCounts[i] | sum(curveVertexCounts[:])
+ cubic | nonperiodic | (curveVertexCounts[i] - 4) / vstep + 1 | sum(curveVertexCounts[:] - 4) / vstep + len(curveVertexCounts)
+ cubic | periodic | curveVertexCounts[i] / vstep | sum(curveVertexCounts[:]) / vstep
+ cubic | pinned (catmullRom/bspline) | (curveVertexCounts[i] - 2) + 1 | sum(curveVertexCounts[:] - 2) + len(curveVertexCounts)
+
+ The following table describes the expected size of varying
+ (linearly interpolated) data, derived from the segment counts computed
+ above.
+
+ wrap | curve varying count | batch varying count
+ ------------------- | ---------------------------- | ------------------------------------------------
+ nonperiodic/pinned | segmentCounts[i] + 1 | sum(segmentCounts[:]) + len(curveVertexCounts)
+ periodic | segmentCounts[i] | sum(segmentCounts[:])
+
+ Both curve types additionally define 'constant' interpolation for the
+ entire prim and 'uniform' interpolation as per curve data.
+
+
+ \\note Take care when providing support for linearly interpolated data for
+ cubic curves. Its shape doesn't provide a one to one mapping with either
+ the number of curves (like 'uniform') or the number of vertices (like
+ 'vertex') and so it is often overlooked. This is the only primitive in
+ UsdGeom (as of this writing) where this is true. For meshes, while they
+ use different interpolation methods, 'varying' and 'vertex' are both
+ specified per point. It's common to assume that curves follow a similar
+ pattern and build in structures and language for per primitive, per
+ element, and per point data only to come upon these arrays that don't
+ quite fit into either of those categories. It is
+ also common to conflate 'varying' with being per segment data and use the
+ segmentCount rules table instead of its neighboring varying data table
+ rules. We suspect that this is because for the common case of
+ nonperiodic cubic curves, both the provided segment count and varying data
+ size formula end with '+ 1'. While debugging, users may look at the double
+ '+ 1' as a mistake and try to remove it. We take this time to enumerate
+ these issues because we've fallen into them before and hope that we save
+ others time in their own implementations.
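+
+ As an illustrative, non-normative aid, the rules in the two tables above
+ can be expressed as a small Python sketch; the helper names
+ computeSegmentCount and computeVaryingCount below are hypothetical and
+ are not part of this schema or its generated API:
+ \\code
+ def computeSegmentCount(vertexCount, isLinear, isPeriodic, isPinned, vstep):
+     # Linear curves ignore the basis and vstep.
+     if isLinear:
+         return vertexCount if isPeriodic else vertexCount - 1
+     # Cubic curves follow the vstep table above.
+     if isPeriodic:
+         return vertexCount // vstep
+     if isPinned:
+         return (vertexCount - 2) + 1
+     return (vertexCount - 4) // vstep + 1
+
+ def computeVaryingCount(segmentCount, isPeriodic):
+     # Periodic curves wrap around, so no extra varying value is needed.
+     return segmentCount if isPeriodic else segmentCount + 1
+ \\endcode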
+ + As an example of deriving per curve segment and varying primvar data counts from + the wrap, type, basis, and curveVertexCount, the following table is provided. + + wrap | type | basis | curveVertexCount | curveSegmentCount | varyingDataCount + ------------- | ------- | ------- | ----------------- | ------------------ | ------------------------- + nonperiodic | linear | N/A | [2 3 2 5] | [1 2 1 4] | [2 3 2 5] + nonperiodic | cubic | bezier | [4 7 10 4 7] | [1 2 3 1 2] | [2 3 4 2 3] + nonperiodic | cubic | bspline | [5 4 6 7] | [2 1 3 4] | [3 2 4 5] + periodic | cubic | bezier | [6 9 6] | [2 3 2] | [2 3 2] + periodic | linear | N/A | [3 7] | [3 7] | [3 7] + + Tubes and Ribbons + + The strictest definition of a curve as an infinitely thin wire is not + particularly useful for describing production scenes. The additional + widths and normals attributes can be used to describe cylindrical + tubes and or flat oriented ribbons. + + Curves with only widths defined are imaged as tubes with radius + 'width / 2'. Curves with both widths and normals are imaged as ribbons + oriented in the direction of the interpolated normal vectors. + + While not technically UsdGeomPrimvars, widths and normals + also have interpolation metadata. It's common for authored widths to have + constant, varying, or vertex interpolation + (see UsdGeomCurves::GetWidthsInterpolation()). It's common for + authored normals to have varying interpolation + (see UsdGeomPointBased::GetNormalsInterpolation()). + + \\image html USDCurveHydra.png + + The file used to generate these curves can be found in + extras/usd/examples/usdGeomExamples/basisCurves.usda. It's provided + as a reference on how to properly image both tubes and ribbons. The first + row of curves are linear; the second are cubic bezier. (We aim in future + releases of HdSt to fix the discontinuity seen with broken tangents to + better match offline renderers like RenderMan.) The yellow and violet + cubic curves represent cubic vertex width interpolation for which there is + no equivalent for linear curves. + + \\note How did this prim type get its name? This prim is a portmanteau of + two different statements in the original RenderMan specification: + 'Basis' and 'Curves'. +""" +) +{ + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + uniform token basis = "bezier" ( + allowedTokens = ["bezier", "bspline", "catmullRom"] + doc = """The basis specifies the vstep and matrix used for cubic + interpolation. \\note The 'hermite' and 'power' tokens have been + removed. We've provided UsdGeomHermiteCurves + as an alternative for the 'hermite' basis.""" + ) + int[] curveVertexCounts ( + doc = """Curves-derived primitives can represent multiple distinct, + potentially disconnected curves. 
The length of 'curveVertexCounts' + gives the number of such curves, and each element describes the + number of vertices in the corresponding curve""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + uniform token type = "cubic" ( + allowedTokens = ["linear", "cubic"] + doc = """Linear curves interpolate linearly between two vertices. + Cubic curves use a basis matrix with four vertices to interpolate a segment.""" + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + float[] widths ( + doc = """Provides width specification for the curves, whose application + will depend on whether the curve is oriented (normals are defined for + it), in which case widths are \"ribbon width\", or unoriented, in which + case widths are cylinder width. 
+ 'widths' is not a generic Primvar,
+ but the number of elements in this attribute will be determined by
+ its 'interpolation'. See . If 'widths'
+ and 'primvars:widths' are both specified, the latter has precedence."""
+ )
+ uniform token wrap = "nonperiodic" (
+ allowedTokens = ["nonperiodic", "periodic", "pinned"]
+ doc = """If wrap is set to periodic, the curve when rendered will
+ repeat the initial vertices (dependent on the vstep) to close the
+ curve. If wrap is set to 'pinned', phantom points may be created
+ to ensure that the curve interpolation starts at P[0] and ends at P[n-1].
+ """
+ )
+ uniform token[] xformOpOrder (
+ doc = """Encodes the sequence of transformation operations in the
+ order in which they should be pushed onto a transform stack while
+ visiting a UsdStage's prims in a graph traversal that will effect
+ the desired positioning for this prim and its descendant prims.
+
+ You should rarely, if ever, need to manipulate this attribute directly.
+ It is managed by the AddXformOp(), SetResetXformStack(), and
+ SetXformOpOrder(), and consulted by GetOrderedXformOps() and
+ GetLocalTransformation()."""
+ )
+}
+
+class NurbsCurves "NurbsCurves" (
+ doc = """This schema is analogous to NURBS Curves in packages like Maya
+ and Houdini, often used for interchange of rigging and modeling curves.
+ Unlike Maya, this curve spec supports batching of multiple curves into a
+ single prim, widths, and normals in the schema. Additionally, we require
+ 'numSegments + 2 * degree + 1' knots (2 more than Maya does). This is to
+ be more consistent with RenderMan's NURBS patch specification.
+
+ To express a periodic curve:
+ - knots[0] = knots[1] - (knots[-2] - knots[-3]);
+ - knots[-1] = knots[-2] + (knots[2] - knots[1]);
+
+ To express a nonperiodic curve:
+ - knots[0] = knots[1];
+ - knots[-1] = knots[-2];
+
+ In spite of these slight differences in the spec, curves generated in Maya
+ should be preserved when roundtripping.
+
+ order and range, when representing a batched NurbsCurve, should be
+ authored one value per curve. knots should be the concatenation of
+ all batched curves."""
+)
+{
+ vector3f[] accelerations (
+ doc = """If provided, 'accelerations' should be used with
+ velocities to compute positions between samples for the 'points'
+ attribute rather than interpolating between neighboring 'points'
+ samples. Acceleration is measured in position units per second-squared.
+ To convert to position units per squared UsdTimeCode, divide by the
+ square of UsdStage::GetTimeCodesPerSecond()."""
+ )
+ int[] curveVertexCounts (
+ doc = """Curves-derived primitives can represent multiple distinct,
+ potentially disconnected curves. The length of 'curveVertexCounts'
+ gives the number of such curves, and each element describes the
+ number of vertices in the corresponding curve"""
+ )
+ uniform bool doubleSided = 0 (
+ doc = """Although some renderers treat all parametric or polygonal
+ surfaces as if they were effectively laminae with outward-facing
+ normals on both sides, some renderers derive significant optimizations
+ by considering these surfaces to have only a single outward side,
+ typically determined by control-point winding order and/or
+ orientation. By doing so they can perform \"backface culling\" to
+ avoid drawing the many polygons of most closed surfaces that face away
+ from the viewer.
+
+ However, it is often advantageous to model thin objects such as paper
+ and cloth as single, open surfaces that must be viewable from both
+ sides, always.
Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + double[] knots ( + doc = """Knot vector providing curve parameterization. + The length of the slice of the array for the ith curve + must be ( curveVertexCount[i] + order[i] ), and its + entries must take on monotonically increasing values.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + int[] order = [] ( + doc = """Order of the curve. Order must be positive and is + equal to the degree of the polynomial basis to be evaluated, plus 1. + Its value for the 'i'th curve must be less than or equal to + curveVertexCount[i]""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + double[] pointWeights ( + doc = """Optionally provides \"w\" components for each control point, + thus must be the same length as the points attribute. If authored, + the curve will be rational. If unauthored, the curve will be + polynomial, i.e. weight for all points is 1.0. + \\note Some DCC's pre-weight the points, but in this schema, + points are not pre-weighted.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double2[] ranges ( + doc = """Provides the minimum and maximum parametric values (as defined + by knots) over which the curve is actually defined. The minimum must + be less than the maximum, and greater than or equal to the value of the + knots['i'th curve slice][order[i]-1]. The maxium must be less + than or equal to the last element's value in knots['i'th curve slice]. +\tRange maps to (vmin, vmax) in the RenderMan spec.""" + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + float[] widths ( + doc = """Provides width specification for the curves, whose application + will depend on whether the curve is oriented (normals are defined for + it), in which case widths are \"ribbon width\", or unoriented, in which + case widths are cylinder width. 'widths' is not a generic Primvar, + but the number of elements in this attribute will be determined by + its 'interpolation'. See . If 'widths' + and 'primvars:widths' are both specified, the latter has precedence.""" + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Points "Points" ( + doc = """Points are analogous to the RiPoints spec. + + Points can be an efficient means of storing and rendering particle + effects comprised of thousands or millions of small particles. Points + generally receive a single shading sample each, which should take + normals into account, if present. + + While not technically UsdGeomPrimvars, the widths and normals also + have interpolation metadata. It's common for authored widths and normals + to have constant or varying interpolation.""" +) +{ + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. 
If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + int64[] ids ( + doc = """Ids are optional; if authored, the ids array should be the same + length as the points array, specifying (at each timesample if + point identities are changing) the id of each point. The + type is signed intentionally, so that clients can encode some + binary state on Id'd points without adding a separate + primvar.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. 
+ + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + float[] widths ( + doc = """Widths are defined as the diameter of the points, in + object space. 'widths' is not a generic Primvar, but + the number of elements in this attribute will be determined by + its 'interpolation'. See . If + 'widths' and 'primvars:widths' are both specified, the latter + has precedence.""" + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class PointInstancer "PointInstancer" ( + doc = """Encodes vectorized instancing of multiple, potentially + animated, prototypes (object/instance masters), which can be arbitrary + prims/subtrees on a UsdStage. + + PointInstancer is a \"multi instancer\", as it allows multiple prototypes + to be scattered among its \"points\". We use a UsdRelationship + prototypes to identify and order all of the possible prototypes, by + targeting the root prim of each prototype. The ordering imparted by + relationships associates a zero-based integer with each prototype, and + it is these integers we use to identify the prototype of each instance, + compactly, and allowing prototypes to be swapped out without needing to + reauthor all of the per-instance data. 
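+
+ For example, a minimal two-prototype instancer might be authored with
+ the USD Python API roughly as follows; this is an illustrative sketch
+ only, and the stage, prim paths, and data values are hypothetical:
+ \\code
+ from pxr import Usd, UsdGeom
+
+ stage = Usd.Stage.CreateInMemory()
+ instancer = UsdGeom.PointInstancer.Define(stage, '/Instancer')
+ UsdGeom.Xform.Define(stage, '/Instancer/Protos/Tree')
+ UsdGeom.Xform.Define(stage, '/Instancer/Protos/Rock')
+
+ # The order of relationship targets assigns each prototype its index.
+ protos = instancer.CreatePrototypesRel()
+ protos.AddTarget('/Instancer/Protos/Tree')   # prototype index 0
+ protos.AddTarget('/Instancer/Protos/Rock')   # prototype index 1
+
+ # Each instance selects a prototype by index and supplies a position.
+ instancer.CreateProtoIndicesAttr([0, 1, 1, 0])
+ instancer.CreatePositionsAttr([(0, 0, 0), (5, 0, 0), (10, 0, 0), (15, 0, 0)])
+ \\endcode
+ In this sketch, instances 0 and 3 instance the first prototype and
+ instances 1 and 2 instance the second, as selected by protoIndices.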
+ + The PointInstancer schema is designed to scale to billions of instances, + which motivates the choice to split the per-instance transformation into + position, (quaternion) orientation, and scales, rather than a + 4x4 matrix per-instance. In addition to requiring fewer bytes even if + all elements are authored (32 bytes vs 64 for a single-precision 4x4 + matrix), we can also be selective about which attributes need to animate + over time, for substantial data reduction in many cases. + + Note that PointInstancer is not a Gprim, since it is not a graphical + primitive by any stretch of the imagination. It is, however, + Boundable, since we will sometimes want to treat the entire PointInstancer + similarly to a procedural, from the perspective of inclusion or framing. + + Varying Instance Identity over Time + + PointInstancers originating from simulations often have the characteristic + that points/instances are \"born\", move around for some time period, and then + die (or leave the area of interest). In such cases, billions of instances + may be birthed over time, while at any specific time, only a much + smaller number are actually alive. To encode this situation efficiently, + the simulator may re-use indices in the instance arrays, when a particle + dies, its index will be taken over by a new particle that may be birthed in + a much different location. This presents challenges both for + identity-tracking, and for motion-blur. + + We facilitate identity tracking by providing an optional, animatable + ids attribute, that specifies the 64 bit integer ID of the particle + at each index, at each point in time. If the simulator keeps monotonically + increasing a particle-count each time a new particle is birthed, it will + serve perfectly as particle ids. + + We facilitate motion blur for varying-topology particle streams by + optionally allowing per-instance velocities and angularVelocities + to be authored. If instance transforms are requested at a time between + samples and either of the velocity attributes is authored, then we will + not attempt to interpolate samples of positions or orientations. + If not authored, and the bracketing samples have the same length, then we + will interpolate. + + Computing an Instance Transform + + Each instance's transformation is a combination of the SRT affine transform + described by its scale, orientation, and position, applied after + (i.e. less locally than) the local to parent transformation computed at + the root of the prototype it is instancing. + + If your processing of prototype geometry naturally takes into account the + transform of the prototype root, then this term can be omitted from the + computation of each instance transform, and this can be controlled when + computing instance transformation matrices using the + UsdGeomPointInstancer::PrototypeXformInclusion enumeration. + + To understand the computation of the instance transform, in order to put + an instance of a PointInstancer into the space of the PointInstancer's + parent prim we do the following: + + 1. Apply (most locally) the authored local to parent transformation for + prototypes[protoIndices[i]] + 2. If *scales* is authored, next apply the scaling matrix from *scales[i]* + 3. 
If *orientations* is authored: **if *angularVelocities* is authored**, + first multiply *orientations[i]* by the unit quaternion derived by scaling + *angularVelocities[i]* by the \"time differential\" + from the left-bracketing timeSample for *orientation* to the requested + evaluation time *t*, storing the result in *R*, **else** assign *R* + directly from *orientations[i]*. Apply the rotation matrix derived + from *R*. + 4. Apply the translation derived from *positions[i]*. If *velocities* is + authored, apply the translation deriving from *velocities[i]* scaled by + the time differential from the left-bracketing timeSample for *positions* + to the requested evaluation time *t*. + 5. Least locally, apply the transformation authored on the PointInstancer + prim itself (or the UsdGeomImageable::ComputeLocalToWorldTransform() of the + PointInstancer to put the instance directly into world space) + + If neither *velocities* nor *angularVelocities* are authored, we fallback to + standard position and orientation computation logic (using linear + interpolation between timeSamples) as described by + . + + \\anchor UsdGeom_PITimeScaling + Scaling Velocities for Interpolation + + When computing time-differentials by which to apply velocity or + angularVelocity to positions or orientations, we must scale by + ( 1.0 / UsdStage::GetTimeCodesPerSecond() ), because velocities are recorded + in units/second, while we are interpolating in UsdTimeCode ordinates. + + We provide both high and low-level API's for dealing with the + transformation as a matrix, both will compute the instance matrices using + multiple threads; the low-level API allows the client to cache unvarying + inputs so that they need not be read duplicately when computing over + time. + + See also . + + Primvars on PointInstancer + + \"Primvars\" authored on a PointInstancer prim should + always be applied to each instance with constant interpolation at + the root of the instance. When you are authoring primvars on a + PointInstancer, think about it as if you were authoring them on a + point-cloud (e.g. a UsdGeomPoints gprim). The same + interpolation rules for points apply here, substituting + \"instance\" for \"point\". + + In other words, the (constant) value extracted for each instance + from the authored primvar value depends on the authored interpolation + and elementSize of the primvar, as follows: + - constant or uniform : the entire authored value of the + primvar should be applied exactly to each instance. + - varying, vertex, or faceVarying: the first + elementSize elements of the authored primvar array should be assigned to + instance zero, the second elementSize elements should be assigned to + instance one, and so forth. + + + Masking Instances: \"Deactivating\" and Invising + + Often a PointInstancer is created \"upstream\" in a graphics pipeline, and + the needs of \"downstream\" clients necessitate eliminating some of the + instances from further consideration. Accomplishing this pruning by + re-authoring all of the per-instance attributes is not very attractive, + since it may mean destructively editing a large quantity of data. We + therefore provide means of \"masking\" instances by ID, such that the + instance data is unmolested, but per-instance transform and primvar data + can be retrieved with the no-longer-desired instances eliminated from the + (smaller) arrays. 
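+
+ The transform and velocity handling described above is exposed through the
+ high-level API; a small, illustrative Python sketch (the layer and prim
+ paths are placeholders):
+ \\code
+ from pxr import Usd, UsdGeom
+
+ stage = Usd.Stage.Open('crowd.usda')
+ instancer = UsdGeom.PointInstancer(stage.GetPrimAtPath('/Crowd'))
+ # Returns one 4x4 matrix per instance, composed from scales, orientations,
+ # and positions (plus velocity/angularVelocity terms when authored). The
+ # second argument is the base time whose bracketing samples anchor the
+ # velocity-based computation; per-second velocities are scaled by
+ # 1.0 / stage.GetTimeCodesPerSecond() as described above.
+ xforms = instancer.ComputeInstanceTransformsAtTime(
+     Usd.TimeCode(101.25), Usd.TimeCode(101))
+ \\endcode
+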
PointInstancer allows two independent means of masking + instances by ID, each with different features that meet the needs of + various clients in a pipeline. Both pruning features' lists of ID's are + combined to produce the mask returned by ComputeMaskAtTime(). + + \\note If a PointInstancer has no authored ids attribute, the masking + features will still be available, with the integers specifying element + position in the protoIndices array rather than ID. + + \\subsection UsdGeomPointInstancer_inactiveIds InactiveIds: List-edited, Unvarying Masking + + The first masking feature encodes a list of IDs in a list-editable metadatum + called inactiveIds, which, although it does not have any similar + impact to stage population as \"prim activation\", + it shares with that feature that its application is uniform over all time. + Because it is list-editable, we can sparsely add and remove instances + from it in many layers. + + This sparse application pattern makes inactiveIds a good choice when + further downstream clients may need to reverse masking decisions made + upstream, in a manner that is robust to many kinds of future changes to + the upstream data. + + See ActivateId(), ActivateIds(), DeactivateId(), DeactivateIds(), + ActivateAllIds() + + \\subsection UsdGeomPointInstancer_invisibleIds invisibleIds: Animatable Masking + + The second masking feature encodes a list of IDs in a time-varying + Int64Array-valued UsdAttribute called invisibleIds , since it shares + with \"Imageable visibility\" + the ability to animate object visibility. + + Unlike inactiveIds, overriding a set of opinions for invisibleIds + is not at all straightforward, because one will, in general need to + reauthor (in the overriding layer) **all** timeSamples for the attribute + just to change one Id's visibility state, so it cannot be authored + sparsely. But it can be a very useful tool for situations like encoding + pre-computed camera-frustum culling of geometry when either or both of + the instances or the camera is animated. + + See VisId(), VisIds(), InvisId(), InvisIds(), VisAllIds() + + Processing and Not Processing Prototypes + + Any prim in the scenegraph can be targeted as a prototype by the + prototypes relationship. We do not, however, provide a specific + mechanism for identifying prototypes as geometry that should not be drawn + (or processed) in their own, local spaces in the scenegraph. We + encourage organizing all prototypes as children of the PointInstancer + prim that consumes them, and pruning \"raw\" processing and drawing + traversals when they encounter a PointInstancer prim; this is what the + UsdGeomBBoxCache and UsdImaging engines do. + + There is a pattern one can deploy for organizing the prototypes + such that they will automatically be skipped by basic UsdPrim::GetChildren() + or UsdPrimRange traversals. Usd prims each have a + \"specifier\" of \"def\", \"over\", or \"class\". The + default traversals skip over prims that are \"pure overs\" or classes. So + to protect prototypes from all generic traversals and processing, place + them under a prim that is just an \"over\". For example, + \\code + 01 def PointInstancer \"Crowd_Mid\" + 02 { + 03 rel prototypes = [ , ] + 04 + 05 over \"Prototypes\" + 06 { + 07 def \"MaleThin_Business\" ( + 08 references = [@MaleGroupA/usd/MaleGroupA.usd@] + 09 variants = { + 10 string modelingVariant = \"Thin\" + 11 string costumeVariant = \"BusinessAttire\" + 12 } + 13 ) + 14 { ... } + 15 + 16 def \"MaleThin_Casual\" + 17 ... 
+ 18 } + 19 } + \\endcode + """ +) +{ + vector3f[] accelerations ( + doc = """If authored, per-instance 'accelerations' will be used with + velocities to compute positions between samples for the 'positions' + attribute rather than interpolating between neighboring 'positions' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + vector3f[] angularVelocities ( + doc = """If authored, per-instance angular velocity vector to be used for + interoplating orientations. Angular velocities should be considered + mandatory if both protoIndices and orientations are animated. + Angular velocity is measured in degrees per second. To convert + to degrees per UsdTimeCode, divide by + UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + int64[] ids ( + doc = """Ids are optional; if authored, the ids array should be the same + length as the protoIndices array, specifying (at each timeSample if + instance identities are changing) the id of each instance. The + type is signed intentionally, so that clients can encode some + binary state on Id'd instances without adding a separate primvar. + See also \\ref UsdGeomPointInstancer_varyingTopo""" + ) + int64[] invisibleIds = [] ( + doc = """A list of id's to make invisible at the evaluation time. + See .""" + ) + quath[] orientations ( + doc = """If authored, per-instance orientation of each instance about its + prototype's origin, represented as a unit length quaternion, which + allows us to encode it with sufficient precision in a compact GfQuath. + + It is client's responsibility to ensure that authored quaternions are + unit length; the convenience API below for authoring orientations from + rotation matrices will ensure that quaternions are unit length, though + it will not make any attempt to select the \"better (for interpolation + with respect to neighboring samples)\" of the two possible quaternions + that encode the rotation. + + See also .""" + ) + point3f[] positions ( + doc = """Required property. Per-instance position. See also + .""" + ) + int[] protoIndices ( + doc = """Required property. Per-instance index into + prototypes relationship that identifies what geometry should be + drawn for each instance. Topology attribute - can be animated, + but at a potential performance impact for streaming.""" + ) + rel prototypes ( + doc = """Required property. 
Orders and targets the prototype root + prims, which can be located anywhere in the scenegraph that is convenient, + although we promote organizing prototypes as children of the + PointInstancer. The position of a prototype in this relationship defines + the value an instance would specify in the protoIndices attribute to + instance that prototype. Since relationships are uniform, this property + cannot be animated.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + float3[] scales ( + doc = """If authored, per-instance scale to be applied to + each instance, before any rotation is applied. + + See also .""" + ) + vector3f[] velocities ( + doc = """If provided, per-instance 'velocities' will be used to + compute positions between samples for the 'positions' attribute, + rather than interpolating between neighboring 'positions' samples. + Velocities should be considered mandatory if both protoIndices + and positions are animated. Velocity is measured in position + units per second, as per most simulation software. To convert to + position units per UsdTimeCode, divide by + UsdStage::GetTimeCodesPerSecond(). + + See also + .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Camera "Camera" ( + doc = """Transformable camera. 
+ + Describes optical properties of a camera via a common set of attributes + that provide control over the camera's frustum as well as its depth of + field. For stereo, the left and right camera are individual prims tagged + through the \"stereoRole attribute\". + + There is a corresponding class GfCamera, which can hold the state of a + camera (at a particular time). and + convert between a USD camera prim and + a GfCamera. + + To obtain the camera's location in world space, call the following on a + UsdGeomCamera 'camera': + \\code + GfMatrix4d camXform = camera.ComputeLocalToWorldTransform(time); + \\endcode + \\note + Cameras in USD are always \"Y up\", regardless of the stage's orientation + (i.e. UsdGeomGetStageUpAxis()). This means that the inverse of + 'camXform' (the VIEW half of the MODELVIEW transform in OpenGL parlance) + will transform the world such that the camera is at the origin, looking + down the -Z axis, with +Y as the up axis, and +X pointing to the right. + This describes a __right handed coordinate system__. + + Units of Measure for Camera Properties + + Despite the familiarity of millimeters for specifying some physical + camera properties, UsdGeomCamera opts for greater consistency with all + other UsdGeom schemas, which measure geometric properties in scene units, + as determined by UsdGeomGetStageMetersPerUnit(). We do make a + concession, however, in that lens and filmback properties are measured in + __tenths of a scene unit__ rather than \"raw\" scene units. This means + that with the fallback value of .01 for _metersPerUnit_ - i.e. scene unit + of centimeters - then these \"tenth of scene unit\" properties are + effectively millimeters. + + \\note If one adds a Camera prim to a UsdStage whose scene unit is not + centimeters, the fallback values for filmback properties will be + incorrect (or at the least, unexpected) in an absolute sense; however, + proper imaging through a \"default camera\" with focusing disabled depends + only on ratios of the other properties, so the camera is still usable. + However, it follows that if even one property is authored in the correct + scene units, then they all must be. + + + \\sa \\ref UsdGeom_LinAlgBasics + """ +) +{ + float4[] clippingPlanes = [] ( + doc = """Additional, arbitrarily oriented clipping planes. + A vector (a,b,c,d) encodes a clipping plane that cuts off + (x,y,z) with a * x + b * y + c * z + d * 1 < 0 where (x,y,z) + are the coordinates in the camera's space.""" + ) + float2 clippingRange = (1, 1000000) ( + doc = """Near and far clipping distances in scene units; see + .""" + ) + float exposure = 0 ( + doc = """Exposure adjustment, as a log base-2 value. The default + of 0.0 has no effect. A value of 1.0 will double the + image-plane intensities in a rendered image; a value of + -1.0 will halve them.""" + ) + float focalLength = 50 ( + doc = """Perspective focal length in tenths of a scene unit; see + .""" + ) + float focusDistance = 0 ( + doc = """Distance from the camera to the focus plane in scene units; see + .""" + ) + float fStop = 0 ( + doc = "Lens aperture. Defaults to 0.0, which turns off focusing." + ) + float horizontalAperture = 20.955 ( + doc = """Horizontal aperture in tenths of a scene unit; see + . Default is the equivalent of + the standard 35mm spherical projector aperture.""" + ) + float horizontalApertureOffset = 0 ( + doc = """Horizontal aperture offset in the same units as + horizontalAperture. 
Defaults to 0.""" + ) + token projection = "perspective" ( + allowedTokens = ["perspective", "orthographic"] + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + double shutter:close = 0 ( + doc = """Frame relative shutter close time, analogous comments from + shutter:open apply. A value greater or equal to shutter:open + should be authored, otherwise there is no exposure and a + renderer should produce a black image.""" + ) + double shutter:open = 0 ( + doc = """Frame relative shutter open time in UsdTimeCode units (negative + value indicates that the shutter opens before the current + frame time). Used for motion blur.""" + ) + uniform token stereoRole = "mono" ( + allowedTokens = ["mono", "left", "right"] + doc = """If different from mono, the camera is intended to be the left + or right camera of a stereo setup.""" + ) + float verticalAperture = 15.2908 ( + doc = """Vertical aperture in tenths of a scene unit; see + . Default is the equivalent of + the standard 35mm spherical projector aperture.""" + ) + float verticalApertureOffset = 0 ( + doc = """Vertical aperture offset in the same units as + verticalAperture. Defaults to 0.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. 
+ It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "GeomModelAPI" ( + doc = """UsdGeomModelAPI extends the generic UsdModelAPI schema with + geometry specific concepts such as cached extents for the entire model, + constraint targets, and geometry-inspired extensions to the payload + lofting process. + + As described in GetExtentsHint() below, it is useful to cache extents + at the model level. UsdGeomModelAPI provides schema for computing and + storing these cached extents, which can be consumed by UsdGeomBBoxCache to + provide fast access to precomputed extents that will be used as the model's + bounds ( see UsdGeomBBoxCache::UsdGeomBBoxCache() ). + + Draw Modes + + Draw modes provide optional alternate imaging behavior for USD subtrees with + kind model. model:drawMode (which is inheritable) and + model:applyDrawMode (which is not) are resolved into a decision to stop + traversing the scene graph at a certain point, and replace a USD subtree + with proxy geometry. + + The value of model:drawMode determines the type of proxy geometry: + - origin - Draw the model-space basis vectors of the replaced prim. + - bounds - Draw the model-space bounding box of the replaced prim. + - cards - Draw textured quads as a placeholder for the replaced prim. + - default - An explicit opinion to draw the USD subtree as normal. + - inherited - Defer to the parent opinion. + + model:drawMode falls back to _inherited_ so that a whole scene, + a large group, or all prototypes of a model hierarchy PointInstancer can + be assigned a draw mode with a single attribute edit. If no draw mode is + explicitly set in a hierarchy, the resolved value is _default_. + + model:applyDrawMode is meant to be written when an asset is authored, + and provides flexibility for different asset types. For example, + a character assembly (composed of character, clothes, etc) might have + model:applyDrawMode set at the top of the subtree so the whole group + can be drawn as a single card object. An effects subtree might have + model:applyDrawMode set at a lower level so each particle + group draws individually. + + Models of kind component are treated as if model:applyDrawMode + were true. This means a prim is drawn with proxy geometry when: the + prim has kind component, and/or model:applyDrawMode is set; and + the prim's resolved value for model:drawMode is not _default_. + + Cards Geometry + + The specific geometry used in cards mode is controlled by the + model:cardGeometry attribute: + - cross - Generate a quad normal to each basis direction and negative. + Locate each quad so that it bisects the model extents. + - box - Generate a quad normal to each basis direction and negative. + Locate each quad on a face of the model extents, facing out. + - fromTexture - Generate a quad for each supplied texture from + attributes stored in that texture's metadata. + + For cross and box mode, the extents are calculated for purposes + default, proxy, and render, at their earliest authored time. + If the model has no textures, all six card faces are rendered using + model:drawModeColor. If one or more textures are present, only axes + with one or more textures assigned are drawn. 
For each axis, if both + textures (positive and negative) are specified, they'll be used on the + corresponding card faces; if only one texture is specified, it will be + mapped to the opposite card face after being flipped on the texture's + s-axis. Any card faces with invalid asset paths will be drawn with + model:drawModeColor. + + Both model:cardGeometry and model:drawModeColor should be + authored on the prim where the draw mode takes effect, since these + attributes are not inherited. + + For fromTexture mode, only card faces with valid textures assigned + are drawn. The geometry is generated by pulling the worldtoscreen + attribute out of texture metadata. This is expected to be a 4x4 matrix + mapping the model-space position of the card quad to the clip-space quad + with corners (-1,-1,0) and (1,1,0). The card vertices are generated by + transforming the clip-space corners by the inverse of worldtoscreen. + Textures are mapped so that (s) and (t) map to (+x) and (+y) in clip space. + If the metadata cannot be read in the right format, or the matrix can't + be inverted, the card face is not drawn. + + All card faces are drawn and textured as single-sided. + + \\todo CreatePayload() """ +) +{ + uniform bool model:applyDrawMode = 0 ( + doc = """If true, and the resolved value of model:drawMode is + non-default, apply an alternate imaging mode to this prim. See + \\ref UsdGeomModelAPI_drawMode.""" + ) + uniform token model:cardGeometry = "cross" ( + allowedTokens = ["cross", "box", "fromTexture"] + doc = """The geometry to generate for imaging prims inserted for \\em + cards imaging mode. See for + geometry descriptions.""" + ) + asset model:cardTextureXNeg ( + doc = """In cards imaging mode, the texture applied to the X- quad. + The texture axes (s,t) are mapped to model-space axes (y, -z).""" + ) + asset model:cardTextureXPos ( + doc = """In cards imaging mode, the texture applied to the X+ quad. + The texture axes (s,t) are mapped to model-space axes (-y, -z).""" + ) + asset model:cardTextureYNeg ( + doc = """In cards imaging mode, the texture applied to the Y- quad. + The texture axes (s,t) are mapped to model-space axes (-x, -z).""" + ) + asset model:cardTextureYPos ( + doc = """In cards imaging mode, the texture applied to the Y+ quad. + The texture axes (s,t) are mapped to model-space axes (x, -z).""" + ) + asset model:cardTextureZNeg ( + doc = """In cards imaging mode, the texture applied to the Z- quad. + The texture axes (s,t) are mapped to model-space axes (-x, -y).""" + ) + asset model:cardTextureZPos ( + doc = """In cards imaging mode, the texture applied to the Z+ quad. + The texture axes (s,t) are mapped to model-space axes (x, -y).""" + ) + uniform token model:drawMode = "inherited" ( + allowedTokens = ["origin", "bounds", "cards", "default", "inherited"] + doc = """Alternate imaging mode; applied to this prim or child prims + where model:applyDrawMode is true, or where the prim + has kind component. See \\ref UsdGeomModelAPI_drawMode + for mode descriptions.""" + ) + uniform float3 model:drawModeColor = (0.18, 0.18, 0.18) ( + doc = """The base color of imaging prims inserted for alternate + imaging modes. 
For origin and bounds modes, this + controls line color; for cards mode, this controls the + fallback quad color.""" + ) +} + +class "MotionAPI" ( + doc = '''UsdGeomMotionAPI encodes data that can live on any prim that + may affect computations involving: + - computed motion for motion blur + - sampling for motion blur + + The "motion:blurScale" attribute allows + artists to scale the __amount__ of motion blur to be rendered for parts + of the scene without changing the recorded animation. See + for use and implementation details. + + ''' +) +{ + float motion:blurScale = 1 ( + doc = """BlurScale is an __inherited__ float attribute that stipulates + the rendered motion blur (as typically specified via UsdGeomCamera's + _shutter:open_ and _shutter:close_ properties) should be scaled for + __all objects__ at and beneath the prim in namespace on which the + _motion:blurScale_ value is specified. + + Without changing any other data in the scene, _blurScale_ allows artists to + \"dial in\" the amount of blur on a per-object basis. A _blurScale_ + value of zero removes all blur, a value of 0.5 reduces blur by half, + and a value of 2.0 doubles the blur. The legal range for _blurScale_ + is [0, inf), although very high values may result in extremely expensive + renders, and may exceed the capabilities of some renderers. + + Although renderers are free to implement this feature however they see + fit, see for our guidance on implementing + the feature universally and efficiently. + + \\sa ComputeMotionBlurScale() + """ + ) + int motion:nonlinearSampleCount = 3 ( + doc = """Determines the number of position or transformation samples + created when motion is described by attributes contributing non-linear + terms. + + To give an example, imagine an application (such as a + renderer) consuming 'points' and the USD document also + contains 'accelerations' for the same prim. Unless the + application can consume these 'accelerations' itself, an + intermediate layer has to compute samples within the sampling + interval for the point positions based on the value of + 'points', 'velocities' and 'accelerations'. The number of these + samples is given by 'nonlinearSampleCount'. The samples are + equally spaced within the sampling interval. + + Another example involves the PointInstancer where + 'nonlinearSampleCount' is relevant when 'angularVelocities' + or 'accelerations' are authored. + + 'nonlinearSampleCount' is an **inherited** attribute, also + see ComputeNonlinearSampleCount()""" + ) + float motion:velocityScale = 1 ( + doc = """\\deprecated + + VelocityScale is an **inherited** float attribute that + velocity-based schemas (e.g. PointBased, PointInstancer) can consume + to compute interpolated positions and orientations by applying + velocity and angularVelocity, which is required for interpolating + between samples when topology is varying over time. Although these + quantities are generally physically computed by a simulator, sometimes + we require more or less motion-blur to achieve the desired look. + VelocityScale allows artists to dial-in, as a post-sim correction, + a scale factor to be applied to the velocity prior to computing + interpolated positions from it.""" + ) +} + +class "XformCommonAPI" ( + doc = """This class provides API for authoring and retrieving a standard set + of component transformations which include a scale, a rotation, a + scale-rotate pivot and a translation. The goal of the API is to enhance + component-wise interchange. 
It achieves this by limiting the set of allowed + basic ops and by specifying the order in which they are applied. In addition + to the basic set of ops, the 'resetXformStack' bit can also be set to + indicate whether the underlying xformable resets the parent transformation + (i.e. does not inherit it's parent's transformation). + + \\sa UsdGeomXformCommonAPI::GetResetXformStack() + \\sa UsdGeomXformCommonAPI::SetResetXformStack() + + The operator-bool for the class will inform you whether an existing + xformable is compatible with this API. + + The scale-rotate pivot is represented by a pair of (translate, + inverse-translate) xformOps around the scale and rotate operations. + The rotation operation can be any of the six allowed Euler angle sets. + \\sa UsdGeomXformOp::Type. + + The xformOpOrder of an xformable that has all of the supported basic ops + is as follows: + [\"xformOp:translate\", \"xformOp:translate:pivot\", \"xformOp:rotateXYZ\", + \"xformOp:scale\", \"!invert!xformOp:translate:pivot\"]. + + It is worth noting that all of the ops are optional. For example, an + xformable may have only a translate or a rotate. It would still be + considered as compatible with this API. Individual SetTranslate(), + SetRotate(), SetScale() and SetPivot() methods are provided by this API + to allow such sparse authoring.""" +) +{ +} + +class HermiteCurves "HermiteCurves" ( + doc = """This schema specifies a cubic hermite interpolated curve batch as + sometimes used for defining guides for animation. While hermite curves can + be useful because they interpolate through their control points, they are + not well supported by high-end renderers for imaging. Therefore, while we + include this schema for interchange, we strongly recommend the use of + UsdGeomBasisCurves as the representation of curves intended to be rendered + (ie. hair or grass). Hermite curves can be converted to a Bezier + representation (though not from Bezier back to Hermite in general). + + Point Interpolation + + The initial cubic curve segment is defined by the first two points and + first two tangents. Additional segments are defined by additional + point / tangent pairs. The number of segments for each non-batched hermite + curve would be len(curve.points) - 1. The total number of segments + for the batched UsdGeomHermiteCurves representation is + len(points) - len(curveVertexCounts). + + Primvar, Width, and Normal Interpolation + + Primvar interpolation is not well specified for this type as it is not + intended as a rendering representation. We suggest that per point + primvars would be linearly interpolated across each segment and should + be tagged as 'varying'. + + It is not immediately clear how to specify cubic or 'vertex' interpolation + for this type, as we lack a specification for primvar tangents. This + also means that width and normal interpolation should be restricted to + varying (linear), uniform (per curve element), or constant (per prim). + """ +) +{ + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + int[] curveVertexCounts ( + doc = """Curves-derived primitives can represent multiple distinct, + potentially disconnected curves. 
The length of 'curveVertexCounts' + gives the number of such curves, and each element describes the + number of vertices in the corresponding curve""" + ) + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. 
DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + vector3f[] tangents = [] ( + doc = """Defines the outgoing trajectory tangent for each point. + Tangents should be the same size as the points attribute.""" + ) + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also .""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + float[] widths ( + doc = """Provides width specification for the curves, whose application + will depend on whether the curve is oriented (normals are defined for + it), in which case widths are \"ribbon width\", or unoriented, in which + case widths are cylinder width. 'widths' is not a generic Primvar, + but the number of elements in this attribute will be determined by + its 'interpolation'. See . 
If 'widths' + and 'primvars:widths' are both specified, the latter has precedence.""" + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + diff --git a/blender/lib/usd/usdGeom/resources/plugInfo.json b/blender/lib/usd/usdGeom/resources/plugInfo.json new file mode 100644 index 0000000..57b5e13 --- /dev/null +++ b/blender/lib/usd/usdGeom/resources/plugInfo.json @@ -0,0 +1,370 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "constraintTargetIdentifier": { + "appliesTo": [ + "attributes" + ], + "default": "", + "documentation": "Unique identifier within a model's namespace for an matrix-valued attribute representing a constraint target", + "type": "token" + }, + "elementSize": { + "appliesTo": [ + "attributes" + ], + "default": 1, + "displayGroup": "Primvars", + "documentation": "The number of values in a primvar's value array that must be aggregated for each element on the primitive.", + "type": "int" + }, + "inactiveIds": { + "appliesTo": [ + "prims" + ], + "type": "int64listop" + }, + "interpolation": { + "appliesTo": [ + "attributes" + ], + "default": "constant", + "displayGroup": "Primvars", + "documentation": "How a primvar interpolates across a primitive; equivalent to RenderMan's 'class specifier'", + "type": "token" + }, + "metersPerUnit": { + "appliesTo": [ + "layers" + ], + "default": 0.01, + "displayGroup": "Stage", + "type": "double" + }, + "unauthoredValuesIndex": { + "appliesTo": [ + "attributes" + ], + "default": -1, + "displayGroup": "Primvars", + "documentation": "The index that represents unauthored values in the indices array of an indexed primvar.", + "type": "int" + }, + "upAxis": { + "appliesTo": [ + "layers" + ], + "default": "Y", + "displayGroup": "Stage", + "type": "token" + } + }, + "Types": { + "UsdGeomBasisCurves": { + "alias": { + "UsdSchemaBase": "BasisCurves" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomCurves" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomBoundable": { + "alias": { + "UsdSchemaBase": "Boundable" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "schemaKind": "abstractTyped" + }, + "UsdGeomCamera": { + "alias": { + "UsdSchemaBase": "Camera" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomCapsule": { + "alias": { + "UsdSchemaBase": "Capsule" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomCone": { + "alias": { + "UsdSchemaBase": "Cone" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomCube": { + "alias": { + "UsdSchemaBase": "Cube" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomCurves": { + "alias": { 
+ "UsdSchemaBase": "Curves" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomPointBased" + ], + "implementsComputeExtent": true, + "schemaKind": "abstractTyped" + }, + "UsdGeomCylinder": { + "alias": { + "UsdSchemaBase": "Cylinder" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomGprim": { + "alias": { + "UsdSchemaBase": "Gprim" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomBoundable" + ], + "schemaKind": "abstractTyped" + }, + "UsdGeomHermiteCurves": { + "alias": { + "UsdSchemaBase": "HermiteCurves" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomCurves" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomImageable": { + "alias": { + "UsdSchemaBase": "Imageable" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "abstractTyped" + }, + "UsdGeomMesh": { + "alias": { + "UsdSchemaBase": "Mesh" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomPointBased" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomModelAPI": { + "alias": { + "UsdSchemaBase": "GeomModelAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdGeomMotionAPI": { + "alias": { + "UsdSchemaBase": "MotionAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdGeomNurbsCurves": { + "alias": { + "UsdSchemaBase": "NurbsCurves" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomCurves" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomNurbsPatch": { + "alias": { + "UsdSchemaBase": "NurbsPatch" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomPointBased" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomPlane": { + "alias": { + "UsdSchemaBase": "Plane" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomPointBased": { + "alias": { + "UsdSchemaBase": "PointBased" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "abstractTyped" + }, + "UsdGeomPointInstancer": { + "alias": { + "UsdSchemaBase": "PointInstancer" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomBoundable" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomPoints": { + "alias": { + "UsdSchemaBase": "Points" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomPointBased" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomPrimvarsAPI": { + "alias": { + "UsdSchemaBase": "PrimvarsAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "nonAppliedAPI" + }, + "UsdGeomScope": { + "alias": { + "UsdSchemaBase": "Scope" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomImageable" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomSphere": { + "alias": { + "UsdSchemaBase": "Sphere" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomGprim" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdGeomSubset": { + "alias": { + "UsdSchemaBase": "GeomSubset" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomVisibilityAPI": { + "alias": { + "UsdSchemaBase": "VisibilityAPI" + }, + "apiSchemaCanOnlyApplyTo": [ + "Imageable" + ], + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdGeomXform": { + "alias": { + 
"UsdSchemaBase": "Xform" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "schemaKind": "concreteTyped" + }, + "UsdGeomXformCommonAPI": { + "alias": { + "UsdSchemaBase": "XformCommonAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "nonAppliedAPI" + }, + "UsdGeomXformable": { + "alias": { + "UsdSchemaBase": "Xformable" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomImageable" + ], + "schemaKind": "abstractTyped" + } + } + }, + "LibraryPath": "", + "Name": "usdGeom", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdGeom/resources/usdGeom/schema.usda b/blender/lib/usd/usdGeom/resources/usdGeom/schema.usda new file mode 100644 index 0000000..a0b1e33 --- /dev/null +++ b/blender/lib/usd/usdGeom/resources/usdGeom/schema.usda @@ -0,0 +1,2504 @@ +#usda 1.0 +( + "This file describes the USD Geometric schemata for code generation." + subLayers = [ + @usd/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdGeom" + string libraryPath = "pxr/usd/usdGeom" + # string libraryPrefix = "UsdGeom" + # string tokensPrefix = "UsdGeom" + bool useLiteralIdentifier = 0 + dictionary libraryTokens = { + dictionary interpolation = { + string doc = """UsdGeomPrimvar - How a Primvar interpolates + across a primitive; equivalent to RenderMan's \\ref Usd_InterpolationVals "class specifier" """ + } + dictionary elementSize = { + string doc = """UsdGeomPrimvar - The number of values in the + value array that must be aggregated for each element on the + primitive.""" + } + dictionary unauthoredValuesIndex = { + string doc = """UsdGeomPrimvar - The index that represents + unauthored values in the indices array of an indexed primvar.""" + } + dictionary constant ={ + string doc = """Possible value for UsdGeomPrimvar::SetInterpolation. + Default value for UsdGeomPrimvar::GetInterpolation. One value + remains constant over the entire surface primitive.""" + } + dictionary uniform = { + string doc = """Possible value for UsdGeomPrimvar::SetInterpolation. + One value remains constant for each uv patch segment of the + surface primitive (which is a \\em face for meshes).""" + } + dictionary varying = { + string doc = """Possible value for UsdGeomPrimvar::SetInterpolation. + Four values are interpolated over each uv patch segment of the + surface. Bilinear interpolation is used for interpolation + between the four values.""" + } + dictionary vertex = { + string doc = """Possible value for UsdGeomPrimvar::SetInterpolation. + Values are interpolated between each vertex in the surface + primitive. The basis function of the surface is used for + interpolation between vertices.""" + } + dictionary faceVarying = { + string doc = """Possible value for UsdGeomPrimvar::SetInterpolation. + For polygons and subdivision surfaces, four values are + interpolated over each face of the mesh. Bilinear interpolation + is used for interpolation between the four values.""" + } + dictionary upAxis = { + string doc = """Stage-level metadata that encodes a scene's + orientation as a token whose value can be "Y" or "Z".""" + } + dictionary metersPerUnit = { + string doc = """Stage-level metadata that encodes a scene's + linear unit of measure as meters per encoded unit.""" + } + dictionary partition = { + string doc = """A type of family of GeomSubsets. 
It implies + that every element appears exacly once in only one of the + subsets in the family.""" + } + dictionary nonOverlapping = { + string doc = """A type of family of GeomSubsets. It implies that + the elements in the various subsets belonging to the family are + mutually exclusive, i.e., an element that appears in one + subset may not belong to any other subset in the family.""" + } + dictionary unrestricted = { + string doc = """A type of family of GeomSubsets. It implies that + there are no restrictions w.r.t. the membership of elements in + the subsets. There could be overlapping members in subsets + belonging to the family and the union of all subsets in the + family may not contain all the elements.""" + } + dictionary hermite = { + string doc = """A deprecated basis token for + UsdGeomBasisCurves. Consumers of USD should transition + to using the UsdGeomHermiteCurves schema.""" + } + dictionary power = { + string doc = """A deprecated basis token for + UsdGeomBasisCurves.""" + } + } + } +) +{ +} + +class "Imageable" ( + inherits = + doc = """Base class for all prims that may require rendering or + visualization of some sort. The primary attributes of Imageable + are \\em visibility and \\em purpose, which each provide instructions for + what geometry should be included for processing by rendering and other + computations. + + \\deprecated Imageable also provides API for accessing primvars, which + has been moved to the UsdGeomPrimvarsAPI schema, because primvars can now + be applied on non-Imageable prim types. This API is planned + to be removed, UsdGeomPrimvarsAPI should be used directly instead.""" + customData = { + string extraIncludes = """ +#include "pxr/base/gf/bbox3d.h" +#include "pxr/usd/usdGeom/primvar.h" """ + } +) { + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = """Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.""" + ) + + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See \\ref UsdGeom_ImageablePurpose for more detail about how + \\em purpose is computed and used.""" + ) + rel proxyPrim ( + doc = """The \\em proxyPrim relationship allows us to link a + prim whose \\em purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + \\li In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + \\li DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. 
+ + \\li With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".""" + ) +} + +class "VisibilityAPI" +( + inherits = + customData = { + token[] apiSchemaCanOnlyApplyTo = [ + "Imageable" + ] + } + doc = """ + UsdGeomVisibilityAPI introduces properties that can be used to author + visibility opinions. + + \\note + Currently, this schema only introduces the attributes that are used to + control purpose visibility. Later, this schema will define _all_ + visibility-related properties and UsdGeomImageable will no longer define + those properties. + + The purpose visibility attributes added by this schema, + _guideVisibility_, _proxyVisibility_, and _renderVisibility_ can each be + used to control visibility for geometry of the corresponding purpose + values, with the overall _visibility_ attribute acting as an + override. I.e., if _visibility_ evaluates to "invisible", purpose + visibility is invisible; otherwise, purpose visibility is determined by + the corresponding purpose visibility attribute. + + Note that the behavior of _guideVisibility_ is subtly different from the + _proxyVisibility_ and _renderVisibility_ attributes, in that "guide" + purpose visibility always evaluates to either "invisible" or "visible", + whereas the other attributes may yield computed values of "inherited" if + there is no authored opinion on the attribute or inherited from an + ancestor. This is motivated by the fact that, in Pixar"s user workflows, + we have never found a need to have all guides visible in a scene by + default, whereas we do find that flexibility useful for "proxy" and + "render" geometry. + + This schema can only be applied to UsdGeomImageable prims. The + UseGeomImageable schema provides API for computing the purpose visibility + values that result from the attributes introduced by this schema. + """ +) +{ + uniform token guideVisibility = "invisible" ( + allowedTokens = ["inherited", "invisible", "visible"] + doc = """ + This attribute controls visibility for geometry with purpose "guide". + + Unlike overall _visibility_, _guideVisibility_ is uniform, and + therefore cannot be animated. + + Also unlike overall _visibility_, _guideVisibility_ is tri-state, in + that a descendant with an opinion of "visible" overrides an ancestor + opinion of "invisible". + + The _guideVisibility_ attribute works in concert with the overall + _visibility_ attribute: The visibility of a prim with purpose "guide" + is determined by the inherited values it receives for the _visibility_ + and _guideVisibility_ attributes. If _visibility_ evaluates to + "invisible", the prim is invisible. If _visibility_ evaluates to + "inherited" and _guideVisibility_ evaluates to "visible", then the + prim is visible. __Otherwise, it is invisible.__ + """ + ) + + uniform token proxyVisibility = "inherited" ( + allowedTokens = ["inherited", "invisible", "visible"] + doc = """ + This attribute controls visibility for geometry with purpose "proxy". + + Unlike overall _visibility_, _proxyVisibility_ is uniform, and + therefore cannot be animated. + + Also unlike overall _visibility_, _proxyVisibility_ is tri-state, in + that a descendant with an opinion of "visible" overrides an ancestor + opinion of "invisible". 
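+
+ For example (an illustrative Python sketch; the layer and prim paths are
+ placeholders, and ComputeEffectiveVisibility() is assumed to be available,
+ as in recent USD releases), an opinion can be authored on this attribute
+ and the resolved result queried through UsdGeomImageable:
+ \\code
+ from pxr import Usd, UsdGeom
+
+ stage = Usd.Stage.Open('shot.usda')
+ prim = stage.GetPrimAtPath('/World/Env/Proxy')
+ vis = UsdGeom.VisibilityAPI.Apply(prim)
+ vis.CreateProxyVisibilityAttr(UsdGeom.Tokens.visible)
+ # Resolves overall 'visibility' together with the purpose visibility
+ # attributes introduced by this schema, for the "proxy" purpose.
+ result = UsdGeom.Imageable(prim).ComputeEffectiveVisibility(
+     UsdGeom.Tokens.proxy)
+ \\endcode
+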
+ + The _proxyVisibility_ attribute works in concert with the overall + _visibility_ attribute: The visibility of a prim with purpose "proxy" + is determined by the inherited values it receives for the _visibility_ + and _proxyVisibility_ attributes. If _visibility_ evaluates to + "invisible", the prim is invisible. If _visibility_ evaluates to + "inherited" then: If _proxyVisibility_ evaluates to "visible", then + the prim is visible; if _proxyVisibility_ evaluates to "invisible", + then the prim is invisible; if _proxyVisibility_ evaluates to + "inherited", then the prim may either be visible or invisible, + depending on a fallback value determined by the calling context. + """ + ) + + uniform token renderVisibility = "inherited" ( + allowedTokens = ["inherited", "invisible", "visible"] + doc = """ + This attribute controls visibility for geometry with purpose + "render". + + Unlike overall _visibility_, _renderVisibility_ is uniform, and + therefore cannot be animated. + + Also unlike overall _visibility_, _renderVisibility_ is tri-state, in + that a descendant with an opinion of "visible" overrides an ancestor + opinion of "invisible". + + The _renderVisibility_ attribute works in concert with the overall + _visibility_ attribute: The visibility of a prim with purpose "render" + is determined by the inherited values it receives for the _visibility_ + and _renderVisibility_ attributes. If _visibility_ evaluates to + "invisible", the prim is invisible. If _visibility_ evaluates to + "inherited" then: If _renderVisibility_ evaluates to "visible", then + the prim is visible; if _renderVisibility_ evaluates to "invisible", + then the prim is invisible; if _renderVisibility_ evaluates to + "inherited", then the prim may either be visible or invisible, + depending on a fallback value determined by the calling context. + """ + ) +} + +class "PrimvarsAPI" ( + inherits = + doc = """UsdGeomPrimvarsAPI encodes geometric "primitive variables", + as UsdGeomPrimvar, which interpolate across a primitive's topology, + can override shader inputs, and inherit down namespace. + + \\section usdGeom_PrimvarFetchingAPI Which Method to Use to Retrieve Primvars + + While creating primvars is unambiguous (CreatePrimvar()), there are quite + a few methods available for retrieving primvars, making it potentially + confusing knowing which one to use. Here are some guidelines: + + \\li If you are populating a GUI with the primvars already available for + authoring values on a prim, use GetPrimvars(). + \\li If you want all of the "useful" (e.g. to a renderer) primvars + available at a prim, including those inherited from ancestor prims, use + FindPrimvarsWithInheritance(). Note that doing so individually for many + prims will be inefficient. + \\li To find a particular primvar defined directly on a prim, which may + or may not provide a value, use GetPrimvar(). + \\li To find a particular primvar defined on a prim or inherited from + ancestors, which may or may not provide a value, use + FindPrimvarWithInheritance(). + \\li To *efficiently* query for primvars using the overloads of + FindPrimvarWithInheritance() and FindPrimvarsWithInheritance(), one + must first cache the results of FindIncrementallyInheritablePrimvars() for + each non-leaf prim on the stage. 
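Following the primvar-fetching guidelines above, a small Python sketch (pxr bindings assumed; the primvar name and value are illustrative) that authors a constant primvar through UsdGeomPrimvarsAPI and reads it back; FindPrimvarWithInheritance() could be substituted for GetPrimvar() when inherited opinions from ancestor prims should also be considered.
\\code
from pxr import Usd, UsdGeom, Sdf, Gf

stage = Usd.Stage.CreateInMemory()
card = UsdGeom.Mesh.Define(stage, "/Asset/Geom/card")
pvAPI = UsdGeom.PrimvarsAPI(card.GetPrim())

# One value for the whole gprim => "constant" interpolation.
displayColor = pvAPI.CreatePrimvar("displayColor",
                                   Sdf.ValueTypeNames.Color3fArray,
                                   UsdGeom.Tokens.constant)
displayColor.Set([Gf.Vec3f(0.9, 0.2, 0.1)])

# Direct lookup on this prim only:
print(pvAPI.GetPrimvar("displayColor").Get())
\\endcode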
""" + customData = { + token apiSchemaType = "nonApplied" + string extraIncludes = """ +#include "pxr/usd/usdGeom/primvar.h" """ + } +) { +} + +class "Xformable" ( + inherits = + customData = { + string extraIncludes = """ +#include "pxr/usd/usdGeom/xformOp.h" +#include """ + } + doc = """Base class for all transformable prims, which allows arbitrary + sequences of component affine transformations to be encoded. + + \\note + You may find it useful to review \\ref UsdGeom_LinAlgBasics while reading + this class description. + + Supported Component Transformation Operations + + UsdGeomXformable currently supports arbitrary sequences of the following + operations, each of which can be encoded in an attribute of the proper + shape in any supported precision: + \\li translate - 3D + \\li scale - 3D + \\li rotateX - 1D angle in degrees + \\li rotateY - 1D angle in degrees + \\li rotateZ - 1D angle in degrees + \\li rotateABC - 3D where ABC can be any combination of the six principle + Euler Angle sets: XYZ, XZY, YXZ, YZX, ZXY, ZYX. See + \\ref usdGeom_rotationPackingOrder "note on rotation packing order" + \\li orient - 4D (quaternion) + \\li transform - 4x4D + + Creating a Component Transformation + + To add components to a UsdGeomXformable prim, simply call AddXformOp() + with the desired op type, as enumerated in \\ref UsdGeomXformOp::Type, + and the desired precision, which is one of \\ref UsdGeomXformOp::Precision. + Optionally, you can also provide an "op suffix" for the operator that + disambiguates it from other components of the same type on the same prim. + Application-specific transform schemas can use the suffixes to fill a role + similar to that played by AbcGeom::XformOp's "Hint" enums for their own + round-tripping logic. + + We also provide specific "Add" API for each type, for clarity and + conciseness, e.g. AddTranslateOp(), AddRotateXYZOp() etc. + + AddXformOp() will return a UsdGeomXformOp object, which is a schema on a + newly created UsdAttribute that provides convenience API for authoring + and computing the component transformations. The UsdGeomXformOp can then + be used to author any number of timesamples and default for the op. + + Each successive call to AddXformOp() adds an operator that will be applied + "more locally" than the preceding operator, just as if we were pushing + transforms onto a transformation stack - which is precisely what should + happen when the operators are consumed by a reader. + + \\note + If you can, please try to use the UsdGeomXformCommonAPI, which wraps + the UsdGeomXformable with an interface in which Op creation is taken + care of for you, and there is a much higher chance that the data you + author will be importable without flattening into other DCC's, as it + conforms to a fixed set of Scale-Rotate-Translate Ops. + + \\sa \\ref usdGeom_xformableExamples "Using the Authoring API" + + Data Encoding and Op Ordering + + Because there is no "fixed schema" of operations, all of the attributes + that encode transform operations are dynamic, and are scoped in + the namespace "xformOp". The second component of an attribute's name provides + the \\em type of operation, as listed above. An "xformOp" attribute can + have additional namespace components derived from the \\em opSuffix argument + to the AddXformOp() suite of methods, which provides a preferred way of + naming the ops such that we can have multiple "translate" ops with unique + attribute names. 
For example, in the attribute named + "xformOp:translate:maya:pivot", "translate" is the type of operation and + "maya:pivot" is the suffix. + + The following ordered list of attribute declarations in usda + define a basic Scale-Rotate-Translate with XYZ Euler angles, wherein the + translation is double-precision, and the remainder of the ops are single, + in which we will: + +
    +
  1. Scale by 2.0 in each dimension +
  2. Rotate about the X, Y, and Z axes by 30, 60, and 90 degrees, respectively +
  3. Translate by 100 units in the Y direction +
+ + \\code + float3 xformOp:rotateXYZ = (30, 60, 90) + float3 xformOp:scale = (2, 2, 2) + double3 xformOp:translate = (0, 100, 0) + uniform token[] xformOpOrder = [ "xformOp:translate", "xformOp:rotateXYZ", "xformOp:scale" ] + \\endcode + + The attributes appear in the dictionary order in which USD, by default, + sorts them. To ensure the ops are recovered and evaluated in the correct + order, the schema introduces the **xformOpOrder** attribute, which + contains the names of the op attributes, in the precise sequence in which + they should be pushed onto a transform stack. **Note** that the order is + opposite to what you might expect, given the matrix algebra described in + \\ref UsdGeom_LinAlgBasics. This also dictates order of op creation, + since each call to AddXformOp() adds a new op to the end of the + \\b xformOpOrder array, as a new "most-local" operation. See + \\ref usdGeom_xformableExamples "Example 2 below" for C++ code that could + have produced this USD. + + If it were important for the prim's rotations to be independently + overridable, we could equivalently (at some performance cost) encode + the transformation also like so: + \\code + float xformOp:rotateX = 30 + float xformOp:rotateY = 60 + float xformOp:rotateZ = 90 + float3 xformOp:scale = (2, 2, 2) + double3 xformOp:translate = (0, 100, 0) + uniform token[] xformOpOrder = [ "xformOp:translate", "xformOp:rotateZ", "xformOp:rotateY", "xformOp:rotateX", "xformOp:scale" ] + \\endcode + + Again, note that although we are encoding an XYZ rotation, the three + rotations appear in the **xformOpOrder** in the opposite order, with Z, + followed, by Y, followed by X. + + Were we to add a Maya-style scalePivot to the above example, it might + look like the following: + \\code + float3 xformOp:rotateXYZ = (30, 60, 90) + float3 xformOp:scale = (2, 2, 2) + double3 xformOp:translate = (0, 100, 0) + double3 xformOp:translate:scalePivot + uniform token[] xformOpOrder = [ "xformOp:translate", "xformOp:rotateXYZ", "xformOp:translate:scalePivot", "xformOp:scale" ] + \\endcode + + Paired "Inverted" Ops + + We have been claiming that the ordered list of ops serves as a set + of instructions to a transform stack, but you may have noticed in the last + example that there is a missing operation - the pivot for the scale op + needs to be applied in its inverse-form as a final (most local) op! In the + AbcGeom::Xform schema, we would have encoded an actual "final" translation + op whose value was authored by the exporter as the negation of the pivot's + value. However, doing so would be brittle in USD, given that each op can + be independently overridden, and the constraint that one attribute must be + maintained as the negation of the other in order for successful + re-importation of the schema cannot be expressed in USD. + + Our solution leverages the **xformOpOrder** member of the schema, which, + in addition to ordering the ops, may also contain one of two special + tokens that address the paired op and "stack resetting" behavior. + + The "paired op" behavior is encoded as an "!invert!" prefix in + \\b xformOpOrder, as the result of an AddXformOp(isInverseOp=True) call. + The \\b xformOpOrder for the last example would look like: + \\code + uniform token[] xformOpOrder = [ "xformOp:translate", "xformOp:rotateXYZ", "xformOp:translate:scalePivot", "xformOp:scale", "!invert!xformOp:translate:scalePivot" ] + \\endcode + + When asked for its value via UsdGeomXformOp::GetOpTransform(), an + "inverted" Op (i.e. 
the "inverted" half of a set of paired Ops) will fetch + the value of its paired attribute and return its negation. This works for + all op types - an error will be issued if a "transform" type op is singular + and cannot be inverted. When getting the authored value of an inverted op + via UsdGeomXformOp::Get(), the raw, uninverted value of the associated + attribute is returned. + + For the sake of robustness, setting a value on an inverted op is disallowed. + Attempting to set a value on an inverted op will result in a coding error + and no value being set. + + Resetting the Transform Stack + + The other special op/token that can appear in \\em xformOpOrder is + \\em "!resetXformStack!", which, appearing as the first element of + \\em xformOpOrder, indicates this prim should not inherit the transformation + of its namespace parent. See SetResetXformStack() + + Expected Behavior for "Missing" Ops + + If an importer expects Scale-Rotate-Translate operations, but a prim + has only translate and rotate ops authored, the importer should assume + an identity scale. This allows us to optimize the data a bit, if only + a few components of a very rich schema (like Maya's) are authored in the + app. + + \\anchor usdGeom_xformableExamples + Using the C++ API + + #1. Creating a simple transform matrix encoding + \\snippet examples.cpp CreateMatrixWithDefault + + #2. Creating the simple SRT from the example above + \\snippet examples.cpp CreateExampleSRT + + #3. Creating a parameterized SRT with pivot using UsdGeomXformCommonAPI + \\snippet examples.cpp CreateSRTWithDefaults + + #4. Creating a rotate-only pivot transform with animated + rotation and translation + \\snippet examples.cpp CreateAnimatedTransform + +""" +) { + + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Scope "Scope" ( + inherits = + doc = """Scope is the simplest grouping primitive, and does not carry the + baggage of transformability. Note that transforms should inherit down + through a Scope successfully - it is just a guaranteed no-op from a + transformability perspective.""" +) { +} + +class Xform "Xform" ( + inherits = + doc = """Concrete prim schema for a transform, which implements Xformable """ +) { +} + +class "Boundable" ( + inherits = + doc = """Boundable introduces the ability for a prim to persistently + cache a rectilinear, local-space, extent. + + \\section UsdGeom_Boundable_Extent Why Extent and not Bounds ? + Boundable introduces the notion of "extent", which is a cached computation + of a prim's local-space 3D range for its resolved attributes at the + layer and time in which extent is authored. We have found that with + composed scene description, attempting to cache pre-computed bounds at + interior prims in a scene graph is very fragile, given the ease with which + one can author a single attribute in a stronger layer that can invalidate + many authored caches - or with which a re-published, referenced asset can + do the same. 
+ + Therefore, we limit to precomputing (generally) leaf-prim extent, which + avoids the need to read in large point arrays to compute bounds, and + provides UsdGeomBBoxCache the means to efficiently compute and + (session-only) cache intermediate bounds. You are free to compute and + author intermediate bounds into your scenes, of course, which may work + well if you have sufficient locks on your pipeline to guarantee that once + authored, the geometry and transforms upon which they are based will + remain unchanged, or if accuracy of the bounds is not an ironclad + requisite. + + When intermediate bounds are authored on Boundable parents, the child prims + will be pruned from BBox computation; the authored extent is expected to + incorporate all child bounds.""" +) +{ + # XXX: Note this is really a GfRange3f, which is not fully supported + # in Vt I/O. + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), \\em without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) +} + +class "Gprim" ( + inherits = + doc = """Base class for all geometric primitives. + + Gprim encodes basic graphical properties such as \\em doubleSided and + \\em orientation, and provides primvars for "display color" and "display + opacity" that travel with geometry to be used as shader overrides. """ + +) { + color3f[] primvars:displayColor ( + customData = { + string apiName = "displayColor" + } + doc = """It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a \\em displayColor parameter.""" + ) + + float[] primvars:displayOpacity ( + customData = { + string apiName = "displayOpacity" + } + doc = """Companion to \\em displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + + uniform bool doubleSided = false ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + \\em orientation. By doing so they can perform "backface culling" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. 
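Tying the UsdGeomBoundable extent discussion above to practice, a minimal Python sketch (pxr bindings assumed; values illustrative) that authors a gprim attribute together with a matching extent, then queries bounds through UsdGeomBBoxCache:
\\code
from pxr import Usd, UsdGeom, Gf

stage = Usd.Stage.CreateInMemory()
ball = UsdGeom.Sphere.Define(stage, "/Asset/ball")

# Authoring radius without extent would leave the fallback extent stale,
# so author both, as the schema documentation requests.
ball.CreateRadiusAttr(2.0)
ball.CreateExtentAttr([Gf.Vec3f(-2, -2, -2), Gf.Vec3f(2, 2, 2)])

cache = UsdGeom.BBoxCache(Usd.TimeCode.Default(), [UsdGeom.Tokens.default_])
print(cache.ComputeWorldBound(ball.GetPrim()).ComputeAlignedRange())
\\endcode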
+ + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's \\em doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. + Please see \\ref UsdGeom_WindingOrder for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) +} + +class Cube "Cube" ( + inherits = + doc = """Defines a primitive rectilinear cube centered at the origin. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + double size = 2.0 ( + doc = """Indicates the length of each edge of the cube. If you + author \\em size you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + + float3[] extent = [(-1.0, -1.0, -1.0), (1.0, 1.0, 1.0)] ( + doc = """Extent is re-defined on Cube only to provide a fallback value. + \\sa UsdGeomGprim::GetExtentAttr().""" + ) + +} + +class Sphere "Sphere" ( + inherits = + doc = """Defines a primitive sphere centered at the origin. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + double radius = 1.0 ( + doc = """Indicates the sphere's radius. If you + author \\em radius you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + + float3[] extent = [(-1.0, -1.0, -1.0), (1.0, 1.0, 1.0)] ( + doc = """Extent is re-defined on Sphere only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) +} + +class Cylinder "Cylinder" ( + inherits = + doc = """Defines a primitive cylinder with closed ends, centered at the + origin, whose spine is along the specified \\em axis. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + double height = 2 ( + doc = """The size of the cylinder's spine along the specified + \\em axis. If you author \\em height you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + double radius = 1.0 ( + doc = """The radius of the cylinder. If you author \\em radius + you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = """The axis along which the spine of the cylinder is aligned""" + ) + + float3[] extent = [(-1.0, -1.0, -1.0), (1.0, 1.0, 1.0)] ( + doc = """Extent is re-defined on Cylinder only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) +} + +class Capsule "Capsule" ( + inherits = + doc = """Defines a primitive capsule, i.e. 
a cylinder capped by two half + spheres, centered at the origin, whose spine is along the specified + \\em axis.""" + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + double height = 1.0 ( + doc = """The size of the capsule's spine along the specified + \\em axis excluding the size of the two half spheres, i.e. + the size of the cylinder portion of the capsule. + If you author \\em height you must also author \\em extent. + \\sa GetExtentAttr()""" + ) + double radius = 0.5 ( + doc = """The radius of the capsule. If you + author \\em radius you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = """The axis along which the spine of the capsule is aligned""" + ) + + float3[] extent = [(-0.5, -0.5, -1.0), (0.5, 0.5, 1.0)] ( + doc = """Extent is re-defined on Capsule only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) +} + +class Cone "Cone" ( + inherits = + doc = """Defines a primitive cone, centered at the origin, whose spine + is along the specified \\em axis, with the apex of the cone pointing + in the direction of the positive axis. + + The fallback values for Cube, Sphere, Cone, and Cylinder are set so that + they all pack into the same volume/bounds.""" + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + double height = 2.0 ( + doc = """The size of the cone's spine along the specified + \\em axis. If you author \\em height you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + double radius = 1.0 ( + doc = """The radius of the cone. If you + author \\em radius you must also author \\em extent. + + \\sa GetExtentAttr()""" + ) + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = """The axis along which the spine of the cone is aligned""" + ) + + float3[] extent = [(-1.0, -1.0, -1.0), (1.0, 1.0, 1.0)] ( + doc = """Extent is re-defined on Cone only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) +} + +class Plane "Plane" ( + inherits = + doc = """Defines a primitive plane, centered at the origin, and is defined by + a cardinal axis, width, and length. The plane is double-sided by default. + + The axis of width and length are perpendicular to the plane's \\em axis: + + axis | width | length + ----- | ------ | ------- + X | z-axis | y-axis + Y | x-axis | z-axis + Z | x-axis | y-axis + + """ + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + uniform bool doubleSided = true ( + doc = """Planes are double-sided by default. Clients may also support + single-sided planes. + + \\sa UsdGeomGprim::GetDoubleSidedAttr()""" + ) + double width = 2.0 ( + doc = """The width of the plane, which aligns to the x-axis when \\em axis is + 'Z' or 'Y', or to the z-axis when \\em axis is 'X'. If you author \\em width + you must also author \\em extent. + + \\sa UsdGeomGprim::GetExtentAttr()""" + ) + double length = 2.0 ( + doc = """The length of the plane, which aligns to the y-axis when \\em axis is + 'Z' or 'X', or to the z-axis when \\em axis is 'Y'. If you author \\em length + you must also author \\em extent. + + \\sa UsdGeomGprim::GetExtentAttr()""" + ) + uniform token axis = "Z" ( + allowedTokens = ["X", "Y", "Z"] + doc = """The axis along which the surface of the plane is aligned. 
When set + to 'Z' the plane is in the xy-plane; when \\em axis is 'X' the plane is in + the yz-plane, and when \\em axis is 'Y' the plane is in the xz-plane. + + \\sa UsdGeomGprim::GetAxisAttr().""" + ) + float3[] extent = [(-1.0, -1.0, 0.0), (1.0, 1.0, 0.0)] ( + doc = """Extent is re-defined on Plane only to provide a fallback + value. \\sa UsdGeomGprim::GetExtentAttr().""" + ) +} + +class "PointBased" ( + doc = """Base class for all UsdGeomGprims that possess points, + providing common attributes such as normals and velocities.""" + + inherits = + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { + # positional + point3f[] points ( + doc = """The primary geometry attribute for all PointBased + primitives, describes points in (local) space.""" + ) + + vector3f[] velocities ( + doc = """If provided, 'velocities' should be used by renderers to + + compute positions between samples for the 'points' attribute, rather + than interpolating between neighboring 'points' samples. This is the + only reasonable means of computing motion blur for topologically + varying PointBased primitives. It follows that the length of each + 'velocities' sample must match the length of the corresponding + 'points' sample. Velocity is measured in position units per second, + as per most simulation software. To convert to position units per + UsdTimeCode, divide by UsdStage::GetTimeCodesPerSecond(). + + See also \\ref UsdGeom_VelocityInterpolation .""" + ) + + vector3f[] accelerations ( + doc = """If provided, 'accelerations' should be used with + velocities to compute positions between samples for the 'points' + attribute rather than interpolating between neighboring 'points' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + + # shaping + normal3f[] normals ( + doc = """Provide an object-space orientation for individual points, + which, depending on subclass, may define a surface, curve, or free + points. Note that 'normals' should not be authored on any Mesh that + is subdivided, since the subdivision algorithm will define its own + normals. 'normals' is not a generic primvar, but the number of elements + in this attribute will be determined by its 'interpolation'. See + \\ref SetNormalsInterpolation() . If 'normals' and 'primvars:normals' + are both specified, the latter has precedence.""" + ) +} + +class Mesh "Mesh" ( + inherits = + customData = { + string extraIncludes = """ +#include "pxr/usd/usd/timeCode.h" """ + } + doc="""Encodes a mesh with optional subdivision properties and features. + + As a point-based primitive, meshes are defined in terms of points that + are connected into edges and faces. Many references to meshes use the + term 'vertex' in place of or interchangeably with 'points', while some + use 'vertex' to refer to the 'face-vertices' that define a face. To + avoid confusion, the term 'vertex' is intentionally avoided in favor of + 'points' or 'face-vertices'. + + The connectivity between points, edges and faces is encoded using a + common minimal topological description of the faces of the mesh. Each + face is defined by a set of face-vertices using indices into the Mesh's + _points_ array (inherited from UsdGeomPointBased) and laid out in a + single linear _faceVertexIndices_ array for efficiency. 
A companion + _faceVertexCounts_ array provides, for each face, the number of + consecutive face-vertices in _faceVertexIndices_ that define the face. + No additional connectivity information is required or constructed, so + no adjacency or neighborhood queries are available. + + A key property of this mesh schema is that it encodes both subdivision + surfaces and simpler polygonal meshes. This is achieved by varying the + _subdivisionScheme_ attribute, which is set to specify Catmull-Clark + subdivision by default, so polygonal meshes must always be explicitly + declared. The available subdivision schemes and additional subdivision + features encoded in optional attributes conform to the feature set of + OpenSubdiv + (https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html). + + \\anchor UsdGeom_Mesh_Primvars + __A Note About Primvars__ + + The following list clarifies the number of elements for and the + interpolation behavior of the different primvar interpolation types + for meshes: + + - __constant__: One element for the entire mesh; no interpolation. + - __uniform__: One element for each face of the mesh; elements are + typically not interpolated but are inherited by other faces derived + from a given face (via subdivision, tessellation, etc.). + - __varying__: One element for each point of the mesh; + interpolation of point data is always linear. + - __vertex__: One element for each point of the mesh; + interpolation of point data is applied according to the + _subdivisionScheme_ attribute. + - __faceVarying__: One element for each of the face-vertices that + define the mesh topology; interpolation of face-vertex data may + be smooth or linear, according to the _subdivisionScheme_ and + _faceVaryingLinearInterpolation_ attributes. + + Primvar interpolation types and related utilities are described more + generally in \\ref Usd_InterpolationVals. + + \\anchor UsdGeom_Mesh_Normals + __A Note About Normals__ + + Normals should not be authored on a subdivision mesh, since subdivision + algorithms define their own normals. They should only be authored for + polygonal meshes (_subdivisionScheme_ = "none"). + + The _normals_ attribute inherited from UsdGeomPointBased is not a generic + primvar, but the number of elements in this attribute will be determined by + its _interpolation_. See \\ref UsdGeomPointBased::GetNormalsInterpolation() . + If _normals_ and _primvars:normals_ are both specified, the latter has + precedence. If a polygonal mesh specifies __neither__ _normals_ nor + _primvars:normals_, then it should be treated and rendered as faceted, + with no attempt to compute smooth normals. + + The normals generated for smooth subdivision schemes, e.g. Catmull-Clark + and Loop, will likewise be smooth, but others, e.g. Bilinear, may be + discontinuous between faces and/or within non-planar irregular faces.""" +) { + # + # Common Properties + # + int[] faceVertexIndices ( + doc = """Flat list of the index (into the _points_ attribute) of each + vertex of each face in the mesh. If this attribute has more than + one timeSample, the mesh is considered to be topologically varying.""" + ) + + int[] faceVertexCounts ( + doc = """Provides the number of vertices in each face of the mesh, + which is also the number of consecutive indices in _faceVertexIndices_ + that define the face. The length of this attribute is the number of + faces in the mesh. 
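Because _subdivisionScheme_ falls back to "catmullClark", a plain polygonal mesh has to say so explicitly. A minimal Python sketch (pxr bindings assumed; the single quad is illustrative) of the topology encoding described above:
\\code
from pxr import Usd, UsdGeom, Gf

stage = Usd.Stage.CreateInMemory()
quad = UsdGeom.Mesh.Define(stage, "/Asset/quad")

quad.CreatePointsAttr([Gf.Vec3f(0, 0, 0), Gf.Vec3f(1, 0, 0),
                       Gf.Vec3f(1, 1, 0), Gf.Vec3f(0, 1, 0)])
quad.CreateFaceVertexCountsAttr([4])            # one face, four face-vertices
quad.CreateFaceVertexIndicesAttr([0, 1, 2, 3])  # indices into 'points'
quad.CreateExtentAttr([Gf.Vec3f(0, 0, 0), Gf.Vec3f(1, 1, 0)])
quad.CreateSubdivisionSchemeAttr("none")        # polygonal, not a subdiv
\\endcode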
If this attribute has more than + one timeSample, the mesh is considered to be topologically varying.""" + ) + + # + # Subdiv Properties + # + + uniform token subdivisionScheme = "catmullClark" ( + allowedTokens = ["catmullClark", "loop", "bilinear", "none"] + doc = """The subdivision scheme to be applied to the surface. + Valid values are: + + - __catmullClark__: The default, Catmull-Clark subdivision; preferred + for quad-dominant meshes (generalizes B-splines); interpolation + of point data is smooth (non-linear) + - __loop__: Loop subdivision; preferred for purely triangular meshes; + interpolation of point data is smooth (non-linear) + - __bilinear__: Subdivision reduces all faces to quads (topologically + similar to "catmullClark"); interpolation of point data is bilinear + - __none__: No subdivision, i.e. a simple polygonal mesh; interpolation + of point data is linear + + Polygonal meshes are typically lighter weight and faster to render, + depending on renderer and render mode. Use of "bilinear" will produce + a similar shape to a polygonal mesh and may offer additional guarantees + of watertightness and additional subdivision features (e.g. holes) but + may also not respect authored normals.""") + + token interpolateBoundary = "edgeAndCorner" ( + allowedTokens = ["none", "edgeOnly", "edgeAndCorner"] + doc = """Specifies how subdivision is applied for faces adjacent to + boundary edges and boundary points. Valid values correspond to choices + available in OpenSubdiv: + + - __none__: No boundary interpolation is applied and boundary faces are + effectively treated as holes + - __edgeOnly__: A sequence of boundary edges defines a smooth curve to + which the edges of subdivided boundary faces converge + - __edgeAndCorner__: The default, similar to "edgeOnly" but the smooth + boundary curve is made sharp at corner points + + These are illustrated and described in more detail in the OpenSubdiv + documentation: + https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html#boundary-interpolation-rules""") + + token faceVaryingLinearInterpolation = "cornersPlus1" ( + allowedTokens = ["none", "cornersOnly", "cornersPlus1", + "cornersPlus2", "boundaries", "all"] + doc = """Specifies how elements of a primvar of interpolation type + "faceVarying" are interpolated for subdivision surfaces. Interpolation + can be as smooth as a "vertex" primvar or constrained to be linear at + features specified by several options. 
Valid values correspond to + choices available in OpenSubdiv: + + - __none__: No linear constraints or sharpening, smooth everywhere + - __cornersOnly__: Sharpen corners of discontinuous boundaries only, + smooth everywhere else + - __cornersPlus1__: The default, same as "cornersOnly" plus additional + sharpening at points where three or more distinct face-varying + values occur + - __cornersPlus2__: Same as "cornersPlus1" plus additional sharpening + at points with at least one discontinuous boundary corner or + only one discontinuous boundary edge (a dart) + - __boundaries__: Piecewise linear along discontinuous boundaries, + smooth interior + - __all__: Piecewise linear everywhere + + These are illustrated and described in more detail in the OpenSubdiv + documentation: + https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html#face-varying-interpolation-rules""") + + token triangleSubdivisionRule = "catmullClark" ( + allowedTokens = ["catmullClark", "smooth"] + doc = """Specifies an option to the subdivision rules for the + Catmull-Clark scheme to try and improve undesirable artifacts when + subdividing triangles. Valid values are "catmullClark" for the + standard rules (the default) and "smooth" for the improvement. + + See https://graphics.pixar.com/opensubdiv/docs/subdivision_surfaces.html#triangle-subdivision-rule""") + + int[] holeIndices = [] ( + doc = """The indices of all faces that should be treated as holes, + i.e. made invisible. This is traditionally a feature of subdivision + surfaces and not generally applied to polygonal meshes.""") + + int[] cornerIndices = [] ( + doc = """The indices of points for which a corresponding sharpness + value is specified in _cornerSharpnesses_ (so the size of this array + must match that of _cornerSharpnesses_).""") + + float[] cornerSharpnesses = [] ( + doc = """The sharpness values associated with a corresponding set of + points specified in _cornerIndices_ (so the size of this array must + match that of _cornerIndices_). Use the constant `SHARPNESS_INFINITE` + for a perfectly sharp corner.""") + + int[] creaseIndices = [] ( + doc = """The indices of points grouped into sets of successive pairs + that identify edges to be creased. The size of this array must be + equal to the sum of all elements of the _creaseLengths_ attribute.""") + + int[] creaseLengths = [] ( + doc = """The length of this array specifies the number of creases + (sets of adjacent sharpened edges) on the mesh. Each element gives + the number of points of each crease, whose indices are successively + laid out in the _creaseIndices_ attribute. Since each crease must + be at least one edge long, each element of this array must be at + least two.""") + + float[] creaseSharpnesses = [] ( + doc = """The per-crease or per-edge sharpness values for all creases. + Since _creaseLengths_ encodes the number of points in each crease, + the number of elements in this array will be either len(creaseLengths) + or the sum over all X of (creaseLengths[X] - 1). Note that while + the RI spec allows each crease to have either a single sharpness + or a value per-edge, USD will encode either a single sharpness + per crease on a mesh, or sharpnesses for all edges making up + the creases on a mesh. Use the constant `SHARPNESS_INFINITE` for a + perfectly sharp crease.""") +} + +class GeomSubset "GeomSubset" ( + inherits = + doc = """Encodes a subset of a piece of geometry (i.e. a UsdGeomImageable) + as a set of indices. 
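The corner and crease attributes of Mesh defined above carry several size constraints; the following pure-Python sketch (the function name is illustrative, not part of any USD API) restates them as checks one might run before publishing a mesh:
\\code
def check_sharpness_arrays(cornerIndices, cornerSharpnesses,
                           creaseIndices, creaseLengths, creaseSharpnesses):
    # cornerSharpnesses pairs 1:1 with cornerIndices.
    assert len(cornerIndices) == len(cornerSharpnesses)
    # creaseIndices holds the points of every crease, laid out back to back.
    assert len(creaseIndices) == sum(creaseLengths)
    # A crease spans at least one edge, i.e. two points.
    assert all(n >= 2 for n in creaseLengths)
    # Either one sharpness per crease, or one per edge of every crease.
    per_edge = sum(n - 1 for n in creaseLengths)
    assert len(creaseSharpnesses) in (len(creaseLengths), per_edge)
\\endcode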
Currently only supports encoding of face-subsets, but + could be extended in the future to support subsets representing edges, + segments, points etc. + + To apply to a geometric prim, a GeomSubset prim must be the prim's direct + child in namespace, and possess a concrete defining specifier (i.e. def). + This restriction makes it easy and efficient to discover subsets of a prim. + We might want to relax this restriction if it's common to have multiple + families of subsets on a gprim and if it's useful to be able to + organize subsets belonging to a family under a common scope. See + 'familyName' attribute for more info on defining a family of subsets. + + Note that a GeomSubset isn't an imageable (i.e. doesn't derive from + UsdGeomImageable). So, you can't author visibility for it or + override its purpose. + + Materials are bound to GeomSubsets just as they are for regular + geometry using API available in UsdShade (UsdShadeMaterial::Bind). +""" + customData = { + string className = "Subset" + string extraIncludes = """ +#include "pxr/base/tf/token.h" +#include "pxr/usd/usdGeom/imageable.h" +""" + } +) +{ + uniform token elementType = "face" ( + allowedTokens = ["face"] + doc = """The type of element that the indices target. Currently only + allows "face" and defaults to it.""" + ) + int[] indices = [] ( + doc = """The set of indices included in this subset. The indices need not + be sorted, but the same index should not appear more than once.""" + ) + uniform token familyName = "" ( + doc = """The name of the family of subsets that this subset belongs to. + This is optional and is primarily useful when there are multiple + families of subsets under a geometric prim. In some cases, this could + also be used for achieving proper roundtripping of subset data between + DCC apps. + When multiple subsets belonging to a prim have the same familyName, they + are said to belong to the family. A familyType value can be + encoded on the owner of a family of subsets as a token using the static + method UsdGeomSubset::SetFamilyType(). "familyType" can have one of the + following values: +
  \\li UsdGeomTokens->partition: implies that every element of + the whole geometry appears exactly once in only one of the subsets + belonging to the family.
  \\li UsdGeomTokens->nonOverlapping: an element that appears in one + subset may not appear in any other subset belonging to the family.
  \\li UsdGeomTokens->unrestricted: implies that there are no + restrictions w.r.t. the membership of elements in the subsets. They + could be overlapping and the union of all subsets in the family may + not represent the whole.
+ \\note The validity of subset data is not enforced by the authoring + APIs, however they can be checked using UsdGeomSubset::ValidateFamily(). + """ + ) +} + +class NurbsPatch "NurbsPatch" ( + inherits = + doc = """Encodes a rational or polynomial non-uniform B-spline + surface, with optional trim curves. + + The encoding mostly follows that of RiNuPatch and RiTrimCurve: + https://renderman.pixar.com/resources/current/RenderMan/geometricPrimitives.html#rinupatch , with some minor renaming and coalescing for clarity. + + The layout of control vertices in the \\em points attribute inherited + from UsdGeomPointBased is row-major with U considered rows, and V columns. + + \\anchor UsdGeom_NurbsPatch_Form + NurbsPatch Form + + The authored points, orders, knots, weights, and ranges are all that is + required to render the nurbs patch. However, the only way to model closed + surfaces with nurbs is to ensure that the first and last control points + along the given axis are coincident. Similarly, to ensure the surface is + not only closed but also C2 continuous, the last \\em order - 1 control + points must be (correspondingly) coincident with the first \\em order - 1 + control points, and also the spacing of the last corresponding knots + must be the same as the first corresponding knots. + + Form is provided as an aid to interchange between modeling and + animation applications so that they can robustly identify the intent with + which the surface was modelled, and take measures (if they are able) to + preserve the continuity/concidence constraints as the surface may be rigged + or deformed. + \\li An \\em open-form NurbsPatch has no continuity constraints. + \\li A \\em closed-form NurbsPatch expects the first and last control points + to overlap + \\li A \\em periodic-form NurbsPatch expects the first and last + \\em order - 1 control points to overlap. + + Nurbs vs Subdivision Surfaces + + Nurbs are an important modeling primitive in CAD/CAM tools and early + computer graphics DCC's. Because they have a natural UV parameterization + they easily support "trim curves", which allow smooth shapes to be + carved out of the surface. + + However, the topology of the patch is always rectangular, and joining two + nurbs patches together (especially when they have differing numbers of + spans) is difficult to do smoothly. Also, nurbs are not supported by + the Ptex texturing technology (http://ptex.us). + + Neither of these limitations are shared by subdivision surfaces; therefore, + although they do not subscribe to trim-curve-based shaping, subdivs are + often considered a more flexible modeling primitive. + """ +) { + int uVertexCount ( + doc = """Number of vertices in the U direction. Should be at least as + large as uOrder.""" + ) + + int vVertexCount ( + doc = """Number of vertices in the V direction. Should be at least as + large as vOrder.""" + ) + + int uOrder ( + doc = """Order in the U direction. Order must be positive and is + equal to the degree of the polynomial basis to be evaluated, plus 1.""" + ) + + int vOrder ( + doc = """Order in the V direction. Order must be positive and is + equal to the degree of the polynomial basis to be evaluated, plus 1.""" + ) + + double[] uKnots ( + doc = """Knot vector for U direction providing U parameterization. + The length of this array must be ( uVertexCount + uOrder ), and its + entries must take on monotonically increasing values.""" + ) + + double[] vKnots ( + doc = """Knot vector for V direction providing U parameterization. 
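A hedged Python sketch of the GeomSubset family workflow described above (pxr bindings assumed; the subset and family names are illustrative, and the positional arguments follow the C++ signatures of the static helpers, so they may need adjusting for your USD version):
\\code
from pxr import Usd, UsdGeom

stage = Usd.Stage.CreateInMemory()
body = UsdGeom.Mesh.Define(stage, "/Asset/body")
body.CreateFaceVertexCountsAttr([4, 4])
body.CreateFaceVertexIndicesAttr([0, 1, 2, 3, 1, 4, 5, 2])

imageable = UsdGeom.Imageable(body.GetPrim())
# Face subsets authored as direct children of the mesh prim.
front = UsdGeom.Subset.CreateGeomSubset(
    imageable, "front", UsdGeom.Tokens.face, [0], "materialBind")
back = UsdGeom.Subset.CreateGeomSubset(
    imageable, "back", UsdGeom.Tokens.face, [1], "materialBind")
# Record that the "materialBind" family partitions the faces.
UsdGeom.Subset.SetFamilyType(imageable, "materialBind", UsdGeom.Tokens.partition)
\\endcode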
+ The length of this array must be ( vVertexCount + vOrder ), and its + entries must take on monotonically increasing values.""" + ) + + uniform token uForm = "open" ( + allowedTokens = ["open", "closed", "periodic"] + doc = """Interpret the control grid and knot vectors as representing + an open, geometrically closed, or geometrically closed and C2 continuous + surface along the U dimension. + \\sa \\ref UsdGeom_NurbsPatch_Form "NurbsPatch Form" """ + ) + + uniform token vForm = "open" ( + allowedTokens = ["open", "closed", "periodic"] + doc = """Interpret the control grid and knot vectors as representing + an open, geometrically closed, or geometrically closed and C2 continuous + surface along the V dimension. + \\sa \\ref UsdGeom_NurbsPatch_Form "NurbsPatch Form" """ + ) + + # Alembic's NuPatch does not encode these... wonder how they + # get away with that? Just assume it's the full range, presumably. + double2 uRange ( + doc = """Provides the minimum and maximum parametric values (as defined + by uKnots) over which the surface is actually defined. The minimum + must be less than the maximum, and greater than or equal to the + value of uKnots[uOrder-1]. The maximum must be less than or equal + to the last element's value in uKnots.""" + ) + + double2 vRange ( + doc = """Provides the minimum and maximum parametric values (as defined + by vKnots) over which the surface is actually defined. The minimum + must be less than the maximum, and greater than or equal to the + value of vKnots[vOrder-1]. The maximum must be less than or equal + to the last element's value in vKnots.""" + ) + + double[] pointWeights ( + doc = """Optionally provides "w" components for each control point, + thus must be the same length as the points attribute. If authored, + the patch will be rational. If unauthored, the patch will be + polynomial, i.e. weight for all points is 1.0. + \\note Some DCC's pre-weight the \\em points, but in this schema, + \\em points are not pre-weighted.""" + ) + + int[] trimCurve:counts ( + doc = """Each element specifies how many curves are present in each + "loop" of the trimCurve, and the length of the array determines how + many loops the trimCurve contains. The sum of all elements is the + total number of curves in the trim, to which we will refer as + \\em nCurves in describing the other trim attributes.""" + ) + + int[] trimCurve:orders ( + doc = """Flat list of orders for each of the \\em nCurves curves.""" + ) + + int[] trimCurve:vertexCounts ( + doc = """Flat list of number of vertices for each of the + \\em nCurves curves.""" + ) + + double[] trimCurve:knots ( + doc = """Flat list of parametric values for each of the + \\em nCurves curves. There will be as many knots as the sum over + all elements of \\em vertexCounts plus the sum over all elements of + \\em orders.""" + ) + + double2[] trimCurve:ranges ( + doc = """Flat list of minimum and maximum parametric values + (as defined by \\em knots) for each of the \\em nCurves curves.""" + ) + + double3[] trimCurve:points ( + doc = """Flat list of homogeneous 2D points (u, v, w) that comprise + the \\em nCurves curves. The number of points should be equal to the + sum over all elements of \\em vertexCounts.""" + ) + +} + +class "Curves" ( + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } + inherits = + doc = """Base class for UsdGeomBasisCurves, UsdGeomNurbsCurves, and + UsdGeomHermiteCurves.
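The NurbsPatch knot and range attributes above come with simple arithmetic constraints; this pure-Python sketch (names mirror the schema attributes, the function name is illustrative) makes them explicit for the U direction, and the V direction is symmetric:
\\code
def check_u_parameterization(uVertexCount, uOrder, uKnots, uRange):
    # uOrder is the basis degree plus one, positive, and at most uVertexCount.
    assert 0 < uOrder <= uVertexCount
    # The knot vector carries uVertexCount + uOrder non-decreasing entries.
    assert len(uKnots) == uVertexCount + uOrder
    assert all(a <= b for a, b in zip(uKnots, uKnots[1:]))
    # The evaluated range must lie within [uKnots[uOrder-1], uKnots[-1]].
    lo, hi = uRange
    assert uKnots[uOrder - 1] <= lo < hi <= uKnots[-1]
\\endcode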
The BasisCurves schema is designed to be + analagous to offline renderers' notion of batched curves (such as + the classical RIB definition via Basis and Curves statements), + while the NurbsCurve schema is designed to be analgous to the + NURBS curves found in packages like Maya and Houdini while + retaining their consistency with the RenderMan specification for + NURBS Patches. HermiteCurves are useful for the + interchange of animation guides and paths. + + It is safe to use the length of the curve vertex count to derive + the number of curves and the number and layout of curve vertices, + but this schema should NOT be used to derive the number of curve + points. While vertex indices are implicit in all shipped + descendent types of this schema, one should not assume that all + internal or future shipped schemas will follow this pattern. Be + sure to key any indexing behavior off the concrete type, not this + abstract type. + """ +) { + # topology attributes + int[] curveVertexCounts ( + doc = """Curves-derived primitives can represent multiple distinct, + potentially disconnected curves. The length of 'curveVertexCounts' + gives the number of such curves, and each element describes the + number of vertices in the corresponding curve""" + ) + + # shaping attributes + float[] widths ( + doc = """Provides width specification for the curves, whose application + will depend on whether the curve is oriented (normals are defined for + it), in which case widths are "ribbon width", or unoriented, in which + case widths are cylinder width. 'widths' is not a generic Primvar, + but the number of elements in this attribute will be determined by + its 'interpolation'. See \\ref SetWidthsInterpolation() . If 'widths' + and 'primvars:widths' are both specified, the latter has precedence.""" + ) +} + +class BasisCurves "BasisCurves" ( + inherits = + doc = """BasisCurves are a batched curve representation analogous to the + classic RIB definition via Basis and Curves statements. BasisCurves are + often used to render dense aggregate geometry like hair or grass. + + A 'matrix' and 'vstep' associated with the \\em basis are used to + interpolate the vertices of a cubic BasisCurves. (The basis attribute + is unused for linear BasisCurves.) + + A single prim may have many curves whose count is determined implicitly by + the length of the \\em curveVertexCounts vector. Each individual curve is + composed of one or more segments. Each segment is defined by four vertices + for cubic curves and two vertices for linear curves. See the next section + for more information on how to map curve vertex counts to segment counts. + + \\section UsdGeomBasisCurves_Segment Segment Indexing + Interpolating a curve requires knowing how to decompose it into its + individual segments. + + The segments of a cubic curve are determined by the vertex count, + the \\em wrap (periodicity), and the vstep of the basis. For linear + curves, the basis token is ignored and only the vertex count and + wrap are needed. + + cubic basis | vstep + ------------- | ------ + bezier | 3 + catmullRom | 1 + bspline | 1 + + The first segment of a cubic (nonperiodic) curve is always defined by its + first four points. The vstep is the increment used to determine what + vertex indices define the next segment. For a two segment (nonperiodic) + bspline basis curve (vstep = 1), the first segment will be defined by + interpolating vertices [0, 1, 2, 3] and the second segment will be defined + by [1, 2, 3, 4]. 
For a two segment bezier basis curve (vstep = 3), the + first segment will be defined by interpolating vertices [0, 1, 2, 3] and + the second segment will be defined by [3, 4, 5, 6]. If the vstep is not + one, then you must take special care to make sure that the number of cvs + properly divides by your vstep. (The indices described are relative to + the initial vertex index for a batched curve.) + + For periodic curves, at least one of the curve's initial vertices are + repeated to close the curve. For cubic curves, the number of vertices + repeated is '4 - vstep'. For linear curves, only one vertex is repeated + to close the loop. + + Pinned curves are a special case of nonperiodic curves that only affects + the behavior of cubic Bspline and Catmull-Rom curves. To evaluate or render + pinned curves, a client must effectively add 'phantom points' at the + beginning and end of every curve in a batch. These phantom points + are injected to ensure that the interpolated curve begins at P[0] and + ends at P[n-1]. + + For a curve with initial point P[0] and last point P[n-1], the phantom + points are defined as. + P[-1] = 2 * P[0] - P[1] + P[n] = 2 * P[n-1] - P[n-2] + + Pinned cubic curves will (usually) have to be unpacked into the standard + nonperiodic representation before rendering. This unpacking can add some + additional overhead. However, using pinned curves reduces the amount of + data recorded in a scene and (more importantly) better records the + authors' intent for interchange. + + \\note The additional phantom points mean that the minimum curve vertex + count for cubic bspline and catmullRom curves is 2. + + Linear curve segments are defined by two vertices. + A two segment linear curve's first segment would be defined by + interpolating vertices [0, 1]. The second segment would be defined by + vertices [1, 2]. (Again, for a batched curve, indices are relative to + the initial vertex index.) + + When validating curve topology, each renderable entry in the + curveVertexCounts vector must pass this check. + + type | wrap | validitity + ------- | --------------------------- | ---------------- + linear | nonperiodic | curveVertexCounts[i] > 2 + linear | periodic | curveVertexCounts[i] > 3 + cubic | nonperiodic | (curveVertexCounts[i] - 4) % vstep == 0 + cubic | periodic | (curveVertexCounts[i]) % vstep == 0 + cubic | pinned (catmullRom/bspline) | (curveVertexCounts[i] - 2) >= 0 + + \\section UsdGeomBasisCurves_BasisMatrix Cubic Vertex Interpolation + + \\image html USDCurveBasisMatrix.png width=750 + + \\section UsdGeomBasisCurves_Linear Linear Vertex Interpolation + + Linear interpolation is always used on curves of type linear. + 't' with domain [0, 1], the curve is defined by the equation + P0 * (1-t) + P1 * t. t at 0 describes the first point and t at 1 describes + the end point. + + \\section UsdGeomBasisCurves_PrimvarInterpolation Primvar Interpolation + + For cubic curves, primvar data can be either interpolated cubically between + vertices or linearly across segments. The corresponding token + for cubic interpolation is 'vertex' and for linear interpolation is + 'varying'. Per vertex data should be the same size as the number + of vertices in your curve. Segment varying data is dependent on the + wrap (periodicity) and number of segments in your curve. For linear curves, + varying and vertex data would be interpolated the same way. By convention + varying is the preferred interpolation because of the association of + varying with linear interpolation. 
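Putting the cubic-curve validity rules above together with the per-curve segment counts tabulated just below, a small pure-Python helper (illustrative only, not part of the USD API) for one entry of \\em curveVertexCounts:
\\code
VSTEP = {"bezier": 3, "bspline": 1, "catmullRom": 1}

def cubic_segment_count(nverts, wrap, basis):
    vstep = VSTEP[basis]
    if wrap == "nonperiodic":
        assert (nverts - 4) % vstep == 0
        return (nverts - 4) // vstep + 1
    if wrap == "periodic":
        assert nverts % vstep == 0
        return nverts // vstep
    if wrap == "pinned":                    # bspline / catmullRom only
        assert nverts >= 2
        return (nverts - 2) + 1
    raise ValueError(wrap)

# e.g. a nonperiodic bezier entry of 7 CVs has (7 - 4) / 3 + 1 = 2 segments
# and, being nonperiodic, 2 + 1 = 3 "varying" values.
\\endcode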
+ + \\image html USDCurvePrimvars.png + + To convert an entry in the curveVertexCounts vector into a segment count + for an individual curve, apply these rules. Sum up all the results in + order to compute how many total segments all curves have. + + The following tables describe the expected segment count for the 'i'th + curve in a curve batch as well as the entire batch. Python syntax + like '[:]' (to describe all members of an array) and 'len(...)' + (to describe the length of an array) are used. + + type | wrap | curve segment count | batch segment count + ------- | --------------------------- | -------------------------------------- | -------------------------- + linear | nonperiodic | curveVertexCounts[i] - 1 | sum(curveVertexCounts[:]) - len(curveVertexCounts) + linear | periodic | curveVertexCounts[i] | sum(curveVertexCounts[:]) + cubic | nonperiodic | (curveVertexCounts[i] - 4) / vstep + 1 | sum(curveVertexCounts[:] - 4) / vstep + len(curveVertexCounts) + cubic | periodic | curveVertexCounts[i] / vstep | sum(curveVertexCounts[:]) / vstep + cubic | pinned (catmullRom/bspline) | (curveVertexCounts[i] - 2) + 1 | sum(curveVertexCounts[:] - 2) + len(curveVertexCounts) + + The following table descrives the expected size of varying + (linearly interpolated) data, derived from the segment counts computed + above. + + wrap | curve varying count | batch varying count + ------------------- | ---------------------------- | ------------------------------------------------ + nonperiodic/pinned | segmentCounts[i] + 1 | sum(segmentCounts[:]) + len(curveVertexCounts) + periodic | segmentCounts[i] | sum(segmentCounts[:]) + + Both curve types additionally define 'constant' interpolation for the + entire prim and 'uniform' interpolation as per curve data. + + + \\note Take care when providing support for linearly interpolated data for + cubic curves. Its shape doesn't provide a one to one mapping with either + the number of curves (like 'uniform') or the number of vertices (like + 'vertex') and so it is often overlooked. This is the only primitive in + UsdGeom (as of this writing) where this is true. For meshes, while they + use different interpolation methods, 'varying' and 'vertex' are both + specified per point. It's common to assume that curves follow a similar + pattern and build in structures and language for per primitive, per + element, and per point data only to come upon these arrays that don't + quite fit into either of those categories. It is + also common to conflate 'varying' with being per segment data and use the + segmentCount rules table instead of its neighboring varying data table + rules. We suspect that this is because for the common case of + nonperiodic cubic curves, both the provided segment count and varying data + size formula end with '+ 1'. While debugging, users may look at the double + '+ 1' as a mistake and try to remove it. We take this time to enumerate + these issues because we've fallen into them before and hope that we save + others time in their own implementations. + + As an example of deriving per curve segment and varying primvar data counts from + the wrap, type, basis, and curveVertexCount, the following table is provided. 
+
+ wrap          | type    | basis   | curveVertexCount  | curveSegmentCount  | varyingDataCount
+ ------------- | ------- | ------- | ----------------- | ------------------ | -------------------------
+ nonperiodic   | linear  | N/A     | [2 3 2 5]         | [1 2 1 4]          | [2 3 2 5]
+ nonperiodic   | cubic   | bezier  | [4 7 10 4 7]      | [1 2 3 1 2]        | [2 3 4 2 3]
+ nonperiodic   | cubic   | bspline | [5 4 6 7]         | [2 1 3 4]          | [3 2 4 5]
+ periodic      | cubic   | bezier  | [6 9 6]           | [2 3 2]            | [2 3 2]
+ periodic      | linear  | N/A     | [3 7]             | [3 7]              | [3 7]
+
+ \\section UsdGeomBasisCurves_TubesAndRibbons Tubes and Ribbons
+
+ The strictest definition of a curve as an infinitely thin wire is not
+ particularly useful for describing production scenes. The additional
+ \\em widths and \\em normals attributes can be used to describe cylindrical
+ tubes and/or flat oriented ribbons.
+
+ Curves with only widths defined are imaged as tubes with radius
+ 'width / 2'. Curves with both widths and normals are imaged as ribbons
+ oriented in the direction of the interpolated normal vectors.
+
+ While not technically UsdGeomPrimvars, widths and normals
+ also have interpolation metadata. It's common for authored widths to have
+ constant, varying, or vertex interpolation
+ (see UsdGeomCurves::GetWidthsInterpolation()). It's common for
+ authored normals to have varying interpolation
+ (see UsdGeomPointBased::GetNormalsInterpolation()).
+
+ \\image html USDCurveHydra.png
+
+ The file used to generate these curves can be found in
+ extras/usd/examples/usdGeomExamples/basisCurves.usda. It's provided
+ as a reference on how to properly image both tubes and ribbons. The first
+ row of curves is linear; the second is cubic bezier. (We aim in future
+ releases of HdSt to fix the discontinuity seen with broken tangents to
+ better match offline renderers like RenderMan.) The yellow and violet
+ cubic curves represent cubic vertex width interpolation for which there is
+ no equivalent for linear curves.
+
+ \\note How did this prim type get its name? This prim is a portmanteau of
+ two different statements in the original RenderMan specification:
+ 'Basis' and 'Curves'.
+"""
+) {
+ # interpolation attributes
+ uniform token type = "cubic" (
+ allowedTokens = ["linear", "cubic"]
+ doc = """Linear curves interpolate linearly between two vertices.
+ Cubic curves use a basis matrix with four vertices to interpolate a segment.""")
+
+ uniform token basis = "bezier" (
+ allowedTokens = ["bezier", "bspline", "catmullRom"]
+ doc = """The basis specifies the vstep and matrix used for cubic
+ interpolation. \\note The 'hermite' and 'power' tokens have been
+ removed. We've provided UsdGeomHermiteCurves
+ as an alternative for the 'hermite' basis.""")
+
+ uniform token wrap = "nonperiodic" (
+ allowedTokens = ["nonperiodic", "periodic", "pinned"]
+ doc = """If wrap is set to periodic, the curve when rendered will
+ repeat the initial vertices (dependent on the vstep) to close the
+ curve. If wrap is set to 'pinned', phantom points may be created
+ to ensure that the curve interpolation starts at P[0] and ends at P[n-1].
+ """)
+}
+
+class NurbsCurves "NurbsCurves" (
+ inherits =
+ doc = """This schema is analogous to NURBS Curves in packages like Maya
+ and Houdini, often used for interchange of rigging and modeling curves.
+ Unlike Maya, this curve spec supports batching of multiple curves into a
+ single prim, widths, and normals in the schema. Additionally, we require
+ 'numSegments + 2 * degree + 1' knots (2 more than maya does).
This is to
+ be more consistent with RenderMan's NURBS patch specification.
+
+ To express a periodic curve:
+ - knots[0] = knots[1] - (knots[-2] - knots[-3]);
+ - knots[-1] = knots[-2] + (knots[2] - knots[1]);
+
+ To express a nonperiodic curve:
+ - knots[0] = knots[1];
+ - knots[-1] = knots[-2];
+
+ In spite of these slight differences in the spec, curves generated in Maya
+ should be preserved when roundtripping.
+
+ \\em order and \\em range, when representing a batched NurbsCurve, should be
+ authored one value per curve. \\em knots should be the concatenation of
+ all batched curves."""
+) {
+ # topology attributes
+ int[] order = [] (
+ doc = """Order of the curve. Order must be positive and is
+ equal to the degree of the polynomial basis to be evaluated, plus 1.
+ Its value for the 'i'th curve must be less than or equal to
+ curveVertexCount[i]""")
+
+ # interpolation attributes
+ double[] knots (
+ doc = """Knot vector providing curve parameterization.
+ The length of the slice of the array for the ith curve
+ must be ( curveVertexCount[i] + order[i] ), and its
+ entries must take on monotonically increasing values.""")
+
+ double2[] ranges (
+ doc = """Provides the minimum and maximum parametric values (as defined
+ by knots) over which the curve is actually defined. The minimum must
+ be less than the maximum, and greater than or equal to the value of the
+ knots['i'th curve slice][order[i]-1]. The maximum must be less
+ than or equal to the last element's value in knots['i'th curve slice].
+ Range maps to (vmin, vmax) in the RenderMan spec.""")
+
+ double[] pointWeights (
+ doc = """Optionally provides "w" components for each control point,
+ thus must be the same length as the points attribute. If authored,
+ the curve will be rational. If unauthored, the curve will be
+ polynomial, i.e. weight for all points is 1.0.
+ \\note Some DCC's pre-weight the \\em points, but in this schema,
+ \\em points are not pre-weighted."""
+ )
+}
+
+class Points "Points" (
+ customData = {
+ dictionary extraPlugInfo = {
+ bool implementsComputeExtent = true
+ }
+ }
+ inherits =
+ doc = """Points are analogous to the RiPoints spec.
+
+ Points can be an efficient means of storing and rendering particle
+ effects comprised of thousands or millions of small particles. Points
+ generally receive a single shading sample each, which should take
+ \\em normals into account, if present.
+
+ While not technically UsdGeomPrimvars, the widths and normals also
+ have interpolation metadata. It's common for authored widths and normals
+ to have constant or varying interpolation."""
+
+) {
+ # shaping attributes
+ float[] widths (
+ doc = """Widths are defined as the \\em diameter of the points, in
+ object space. 'widths' is not a generic Primvar, but
+ the number of elements in this attribute will be determined by
+ its 'interpolation'. See \\ref SetWidthsInterpolation() . If
+ 'widths' and 'primvars:widths' are both specified, the latter
+ has precedence."""
+ )
+
+ int64[] ids (
+ doc = """Ids are optional; if authored, the ids array should be the same
+ length as the points array, specifying (at each timeSample if
+ point identities are changing) the id of each point.
The + type is signed intentionally, so that clients can encode some + binary state on Id'd points without adding a separate + primvar.""" + ) +} + +class PointInstancer "PointInstancer" ( + doc = """Encodes vectorized instancing of multiple, potentially + animated, prototypes (object/instance masters), which can be arbitrary + prims/subtrees on a UsdStage. + + PointInstancer is a "multi instancer", as it allows multiple prototypes + to be scattered among its "points". We use a UsdRelationship + \\em prototypes to identify and order all of the possible prototypes, by + targeting the root prim of each prototype. The ordering imparted by + relationships associates a zero-based integer with each prototype, and + it is these integers we use to identify the prototype of each instance, + compactly, and allowing prototypes to be swapped out without needing to + reauthor all of the per-instance data. + + The PointInstancer schema is designed to scale to billions of instances, + which motivates the choice to split the per-instance transformation into + position, (quaternion) orientation, and scales, rather than a + 4x4 matrix per-instance. In addition to requiring fewer bytes even if + all elements are authored (32 bytes vs 64 for a single-precision 4x4 + matrix), we can also be selective about which attributes need to animate + over time, for substantial data reduction in many cases. + + Note that PointInstancer is \\em not a Gprim, since it is not a graphical + primitive by any stretch of the imagination. It \\em is, however, + Boundable, since we will sometimes want to treat the entire PointInstancer + similarly to a procedural, from the perspective of inclusion or framing. + + \\section UsdGeomPointInstancer_varyingTopo Varying Instance Identity over Time + + PointInstancers originating from simulations often have the characteristic + that points/instances are "born", move around for some time period, and then + die (or leave the area of interest). In such cases, billions of instances + may be birthed over time, while at any \\em specific time, only a much + smaller number are actually alive. To encode this situation efficiently, + the simulator may re-use indices in the instance arrays, when a particle + dies, its index will be taken over by a new particle that may be birthed in + a much different location. This presents challenges both for + identity-tracking, and for motion-blur. + + We facilitate identity tracking by providing an optional, animatable + \\em ids attribute, that specifies the 64 bit integer ID of the particle + at each index, at each point in time. If the simulator keeps monotonically + increasing a particle-count each time a new particle is birthed, it will + serve perfectly as particle \\em ids. + + We facilitate motion blur for varying-topology particle streams by + optionally allowing per-instance \\em velocities and \\em angularVelocities + to be authored. If instance transforms are requested at a time between + samples and either of the velocity attributes is authored, then we will + not attempt to interpolate samples of \\em positions or \\em orientations. + If not authored, and the bracketing samples have the same length, then we + will interpolate. + + \\section UsdGeomPointInstancer_transform Computing an Instance Transform + + Each instance's transformation is a combination of the SRT affine transform + described by its scale, orientation, and position, applied \\em after + (i.e. 
less locally than) the local to parent transformation computed at + the root of the prototype it is instancing. + + If your processing of prototype geometry naturally takes into account the + transform of the prototype root, then this term can be omitted from the + computation of each instance transform, and this can be controlled when + computing instance transformation matrices using the + UsdGeomPointInstancer::PrototypeXformInclusion enumeration. + + To understand the computation of the instance transform, in order to put + an instance of a PointInstancer into the space of the PointInstancer's + parent prim we do the following: + + 1. Apply (most locally) the authored local to parent transformation for + prototypes[protoIndices[i]] + 2. If *scales* is authored, next apply the scaling matrix from *scales[i]* + 3. If *orientations* is authored: **if *angularVelocities* is authored**, + first multiply *orientations[i]* by the unit quaternion derived by scaling + *angularVelocities[i]* by the \\ref UsdGeom_PITimeScaling "time differential" + from the left-bracketing timeSample for *orientation* to the requested + evaluation time *t*, storing the result in *R*, **else** assign *R* + directly from *orientations[i]*. Apply the rotation matrix derived + from *R*. + 4. Apply the translation derived from *positions[i]*. If *velocities* is + authored, apply the translation deriving from *velocities[i]* scaled by + the time differential from the left-bracketing timeSample for *positions* + to the requested evaluation time *t*. + 5. Least locally, apply the transformation authored on the PointInstancer + prim itself (or the UsdGeomImageable::ComputeLocalToWorldTransform() of the + PointInstancer to put the instance directly into world space) + + If neither *velocities* nor *angularVelocities* are authored, we fallback to + standard position and orientation computation logic (using linear + interpolation between timeSamples) as described by + \\ref UsdGeom_VelocityInterpolation . + + \\anchor UsdGeom_PITimeScaling + Scaling Velocities for Interpolation + + When computing time-differentials by which to apply velocity or + angularVelocity to positions or orientations, we must scale by + ( 1.0 / UsdStage::GetTimeCodesPerSecond() ), because velocities are recorded + in units/second, while we are interpolating in UsdTimeCode ordinates. + + We provide both high and low-level API's for dealing with the + transformation as a matrix, both will compute the instance matrices using + multiple threads; the low-level API allows the client to cache unvarying + inputs so that they need not be read duplicately when computing over + time. + + See also \\ref UsdGeom_VelocityInterpolation . + + \\section UsdGeomPointInstancer_primvars Primvars on PointInstancer + + \\ref UsdGeomPrimvar "Primvars" authored on a PointInstancer prim should + always be applied to each instance with \\em constant interpolation at + the root of the instance. When you are authoring primvars on a + PointInstancer, think about it as if you were authoring them on a + point-cloud (e.g. a UsdGeomPoints gprim). The same + interpolation rules for points apply here, substituting + "instance" for "point". + + In other words, the (constant) value extracted for each instance + from the authored primvar value depends on the authored \\em interpolation + and \\em elementSize of the primvar, as follows: + \\li constant or uniform : the entire authored value of the + primvar should be applied exactly to each instance. 
+ \\li varying, vertex, or faceVarying: the first + \\em elementSize elements of the authored primvar array should be assigned to + instance zero, the second \\em elementSize elements should be assigned to + instance one, and so forth. + + + \\section UsdGeomPointInstancer_masking Masking Instances: "Deactivating" and Invising + + Often a PointInstancer is created "upstream" in a graphics pipeline, and + the needs of "downstream" clients necessitate eliminating some of the + instances from further consideration. Accomplishing this pruning by + re-authoring all of the per-instance attributes is not very attractive, + since it may mean destructively editing a large quantity of data. We + therefore provide means of "masking" instances by ID, such that the + instance data is unmolested, but per-instance transform and primvar data + can be retrieved with the no-longer-desired instances eliminated from the + (smaller) arrays. PointInstancer allows two independent means of masking + instances by ID, each with different features that meet the needs of + various clients in a pipeline. Both pruning features' lists of ID's are + combined to produce the mask returned by ComputeMaskAtTime(). + + \\note If a PointInstancer has no authored \\em ids attribute, the masking + features will still be available, with the integers specifying element + position in the \\em protoIndices array rather than ID. + + \\subsection UsdGeomPointInstancer_inactiveIds InactiveIds: List-edited, Unvarying Masking + + The first masking feature encodes a list of IDs in a list-editable metadatum + called \\em inactiveIds, which, although it does not have any similar + impact to stage population as \\ref UsdPrim::SetActive() "prim activation", + it shares with that feature that its application is uniform over all time. + Because it is list-editable, we can \\em sparsely add and remove instances + from it in many layers. + + This sparse application pattern makes \\em inactiveIds a good choice when + further downstream clients may need to reverse masking decisions made + upstream, in a manner that is robust to many kinds of future changes to + the upstream data. + + See ActivateId(), ActivateIds(), DeactivateId(), DeactivateIds(), + ActivateAllIds() + + \\subsection UsdGeomPointInstancer_invisibleIds invisibleIds: Animatable Masking + + The second masking feature encodes a list of IDs in a time-varying + Int64Array-valued UsdAttribute called \\em invisibleIds , since it shares + with \\ref UsdGeomImageable::GetVisibilityAttr() "Imageable visibility" + the ability to animate object visibility. + + Unlike \\em inactiveIds, overriding a set of opinions for \\em invisibleIds + is not at all straightforward, because one will, in general need to + reauthor (in the overriding layer) **all** timeSamples for the attribute + just to change one Id's visibility state, so it cannot be authored + sparsely. But it can be a very useful tool for situations like encoding + pre-computed camera-frustum culling of geometry when either or both of + the instances or the camera is animated. + + See VisId(), VisIds(), InvisId(), InvisIds(), VisAllIds() + + \\section UsdGeomPointInstancer_protoProcessing Processing and Not Processing Prototypes + + Any prim in the scenegraph can be targeted as a prototype by the + \\em prototypes relationship. We do not, however, provide a specific + mechanism for identifying prototypes as geometry that should not be drawn + (or processed) in their own, local spaces in the scenegraph. 
We + encourage organizing all prototypes as children of the PointInstancer + prim that consumes them, and pruning "raw" processing and drawing + traversals when they encounter a PointInstancer prim; this is what the + UsdGeomBBoxCache and UsdImaging engines do. + + There \\em is a pattern one can deploy for organizing the prototypes + such that they will automatically be skipped by basic UsdPrim::GetChildren() + or UsdPrimRange traversals. Usd prims each have a + \\ref Usd_PrimSpecifiers "specifier" of "def", "over", or "class". The + default traversals skip over prims that are "pure overs" or classes. So + to protect prototypes from all generic traversals and processing, place + them under a prim that is just an "over". For example, + \\code + 01 def PointInstancer "Crowd_Mid" + 02 { + 03 rel prototypes = [ , ] + 04 + 05 over "Prototypes" + 06 { + 07 def "MaleThin_Business" ( + 08 references = [@MaleGroupA/usd/MaleGroupA.usd@] + 09 variants = { + 10 string modelingVariant = "Thin" + 11 string costumeVariant = "BusinessAttire" + 12 } + 13 ) + 14 { ... } + 15 + 16 def "MaleThin_Casual" + 17 ... + 18 } + 19 } + \\endcode + """ + + inherits = + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + dictionary schemaTokens = { + dictionary inactiveIds = { + string doc = """int64listop prim metadata that specifies + the PointInstancer ids that should be masked (unrenderable) + over all time.""" + } + } + } +) { + rel prototypes ( + doc = """Required property. Orders and targets the prototype root + prims, which can be located anywhere in the scenegraph that is convenient, + although we promote organizing prototypes as children of the + PointInstancer. The position of a prototype in this relationship defines + the value an instance would specify in the \\em protoIndices attribute to + instance that prototype. Since relationships are uniform, this property + cannot be animated.""" + ) + + int[] protoIndices ( + doc = """Required property. Per-instance index into + \\em prototypes relationship that identifies what geometry should be + drawn for each instance. Topology attribute - can be animated, + but at a potential performance impact for streaming.""" + ) + + int64[] ids ( + doc = """Ids are optional; if authored, the ids array should be the same + length as the \\em protoIndices array, specifying (at each timeSample if + instance identities are changing) the id of each instance. The + type is signed intentionally, so that clients can encode some + binary state on Id'd instances without adding a separate primvar. + See also \\ref UsdGeomPointInstancer_varyingTopo""" + ) + + point3f[] positions ( + doc = """Required property. Per-instance position. See also + \\ref UsdGeomPointInstancer_transform .""" + ) + + quath[] orientations ( + doc="""If authored, per-instance orientation of each instance about its + prototype's origin, represented as a unit length quaternion, which + allows us to encode it with sufficient precision in a compact GfQuath. + + It is client's responsibility to ensure that authored quaternions are + unit length; the convenience API below for authoring orientations from + rotation matrices will ensure that quaternions are unit length, though + it will not make any attempt to select the "better (for interpolation + with respect to neighboring samples)" of the two possible quaternions + that encode the rotation. 
+ + See also \\ref UsdGeomPointInstancer_transform .""" ) + + float3[] scales ( + doc="""If authored, per-instance scale to be applied to + each instance, before any rotation is applied. + + See also \\ref UsdGeomPointInstancer_transform .""" + ) + + vector3f[] velocities ( + doc = """If provided, per-instance 'velocities' will be used to + compute positions between samples for the 'positions' attribute, + rather than interpolating between neighboring 'positions' samples. + Velocities should be considered mandatory if both \\em protoIndices + and \\em positions are animated. Velocity is measured in position + units per second, as per most simulation software. To convert to + position units per UsdTimeCode, divide by + UsdStage::GetTimeCodesPerSecond(). + + See also \\ref UsdGeomPointInstancer_transform, + \\ref UsdGeom_VelocityInterpolation .""" + ) + + vector3f[] accelerations ( + doc = """If authored, per-instance 'accelerations' will be used with + velocities to compute positions between samples for the 'positions' + attribute rather than interpolating between neighboring 'positions' + samples. Acceleration is measured in position units per second-squared. + To convert to position units per squared UsdTimeCode, divide by the + square of UsdStage::GetTimeCodesPerSecond().""" + ) + + vector3f[] angularVelocities ( + doc="""If authored, per-instance angular velocity vector to be used for + interoplating orientations. Angular velocities should be considered + mandatory if both \\em protoIndices and \\em orientations are animated. + Angular velocity is measured in degrees per second. To convert + to degrees per UsdTimeCode, divide by + UsdStage::GetTimeCodesPerSecond(). + + See also \\ref UsdGeomPointInstancer_transform .""" + ) + + int64[] invisibleIds = [] ( + doc="""A list of id's to make invisible at the evaluation time. + See \\ref UsdGeomPointInstancer_invisibleIds .""" + ) +} + + +class Camera "Camera" ( + doc = """Transformable camera. + + Describes optical properties of a camera via a common set of attributes + that provide control over the camera's frustum as well as its depth of + field. For stereo, the left and right camera are individual prims tagged + through the \\ref UsdGeomCamera::GetStereoRoleAttr() "stereoRole attribute". + + There is a corresponding class GfCamera, which can hold the state of a + camera (at a particular time). \\ref UsdGeomCamera::GetCamera() and + \\ref UsdGeomCamera::SetFromCamera() convert between a USD camera prim and + a GfCamera. + + To obtain the camera's location in world space, call the following on a + UsdGeomCamera 'camera': + \\code + GfMatrix4d camXform = camera.ComputeLocalToWorldTransform(time); + \\endcode + \\note + Cameras in USD are always "Y up", regardless of the stage's orientation + (i.e. UsdGeomGetStageUpAxis()). This means that the inverse of + 'camXform' (the VIEW half of the MODELVIEW transform in OpenGL parlance) + will transform the world such that the camera is at the origin, looking + down the -Z axis, with +Y as the up axis, and +X pointing to the right. + This describes a __right handed coordinate system__. + + \\section UsdGeom_CameraUnits Units of Measure for Camera Properties + + Despite the familiarity of millimeters for specifying some physical + camera properties, UsdGeomCamera opts for greater consistency with all + other UsdGeom schemas, which measure geometric properties in scene units, + as determined by UsdGeomGetStageMetersPerUnit(). 
We do make a + concession, however, in that lens and filmback properties are measured in + __tenths of a scene unit__ rather than "raw" scene units. This means + that with the fallback value of .01 for _metersPerUnit_ - i.e. scene unit + of centimeters - then these "tenth of scene unit" properties are + effectively millimeters. + + \\note If one adds a Camera prim to a UsdStage whose scene unit is not + centimeters, the fallback values for filmback properties will be + incorrect (or at the least, unexpected) in an absolute sense; however, + proper imaging through a "default camera" with focusing disabled depends + only on ratios of the other properties, so the camera is still usable. + However, it follows that if even one property is authored in the correct + scene units, then they all must be. + + + \\sa \\ref UsdGeom_LinAlgBasics + """ + inherits = + customData = { + string extraIncludes = """ +#include "pxr/base/gf/camera.h" """ + } +) { + # viewing frustum + token projection = "perspective" ( + allowedTokens = ["perspective", "orthographic"]) + float horizontalAperture = 20.9550 ( + doc = """Horizontal aperture in tenths of a scene unit; see + \\ref UsdGeom_CameraUnits . Default is the equivalent of + the standard 35mm spherical projector aperture.""") + float verticalAperture = 15.2908 ( + doc = """Vertical aperture in tenths of a scene unit; see + \\ref UsdGeom_CameraUnits . Default is the equivalent of + the standard 35mm spherical projector aperture.""") + float horizontalApertureOffset = 0.0 ( + doc = """Horizontal aperture offset in the same units as + horizontalAperture. Defaults to 0.""") + float verticalApertureOffset = 0.0 ( + doc = """Vertical aperture offset in the same units as + verticalAperture. Defaults to 0.""") + float focalLength = 50.0 ( + doc = """Perspective focal length in tenths of a scene unit; see + \\ref UsdGeom_CameraUnits .""") + float2 clippingRange = (1, 1000000) ( + doc = """Near and far clipping distances in scene units; see + \\ref UsdGeom_CameraUnits .""") + float4[] clippingPlanes = [] ( + doc = """Additional, arbitrarily oriented clipping planes. + A vector (a,b,c,d) encodes a clipping plane that cuts off + (x,y,z) with a * x + b * y + c * z + d * 1 < 0 where (x,y,z) + are the coordinates in the camera's space.""") + + # depth of field + float fStop = 0.0 ( + doc = """Lens aperture. Defaults to 0.0, which turns off focusing.""") + float focusDistance = 0.0 ( + doc = """Distance from the camera to the focus plane in scene units; see + \\ref UsdGeom_CameraUnits .""") + + # stereoscopic 3D + uniform token stereoRole = "mono" ( + allowedTokens = ["mono", "left", "right"] + doc = """If different from mono, the camera is intended to be the left + or right camera of a stereo setup.""") + + # Parameters for motion blur + double shutter:open = 0.0 ( + doc = """Frame relative shutter open time in UsdTimeCode units (negative + value indicates that the shutter opens before the current + frame time). Used for motion blur.""" + ) + double shutter:close = 0.0 ( + doc = """Frame relative shutter close time, analogous comments from + shutter:open apply. A value greater or equal to shutter:open + should be authored, otherwise there is no exposure and a + renderer should produce a black image.""" + ) + + # exposure adjustment + float exposure = 0.0 ( + doc = """Exposure adjustment, as a log base-2 value. The default + of 0.0 has no effect. 
A value of 1.0 will double the + image-plane intensities in a rendered image; a value of + -1.0 will halve them.""" + ) +} + +class "GeomModelAPI" +( + inherits = + customData = { + string className = "ModelAPI" + string extraIncludes = """ +#include "pxr/usd/usdGeom/bboxCache.h" +#include "pxr/usd/usdGeom/constraintTarget.h" +#include "pxr/usd/usdGeom/imageable.h" """ + dictionary schemaTokens = { + dictionary extentsHint = { + string doc = """Name of the attribute used to author extents + hints at the root of leaf models. Extents hints are stored by purpose + as a vector of GfVec3f values. They are ordered based on the order + of purpose tokens returned by + UsdGeomImageable::GetOrderedPurposeTokens.""" + } + } + } + doc = """UsdGeomModelAPI extends the generic UsdModelAPI schema with + geometry specific concepts such as cached extents for the entire model, + constraint targets, and geometry-inspired extensions to the payload + lofting process. + + As described in GetExtentsHint() below, it is useful to cache extents + at the model level. UsdGeomModelAPI provides schema for computing and + storing these cached extents, which can be consumed by UsdGeomBBoxCache to + provide fast access to precomputed extents that will be used as the model's + bounds ( see UsdGeomBBoxCache::UsdGeomBBoxCache() ). + + \\section UsdGeomModelAPI_drawMode Draw Modes + + Draw modes provide optional alternate imaging behavior for USD subtrees with + kind model. \\em model:drawMode (which is inheritable) and + \\em model:applyDrawMode (which is not) are resolved into a decision to stop + traversing the scene graph at a certain point, and replace a USD subtree + with proxy geometry. + + The value of \\em model:drawMode determines the type of proxy geometry: + - \\em origin - Draw the model-space basis vectors of the replaced prim. + - \\em bounds - Draw the model-space bounding box of the replaced prim. + - \\em cards - Draw textured quads as a placeholder for the replaced prim. + - \\em default - An explicit opinion to draw the USD subtree as normal. + - \\em inherited - Defer to the parent opinion. + + \\em model:drawMode falls back to _inherited_ so that a whole scene, + a large group, or all prototypes of a model hierarchy PointInstancer can + be assigned a draw mode with a single attribute edit. If no draw mode is + explicitly set in a hierarchy, the resolved value is _default_. + + \\em model:applyDrawMode is meant to be written when an asset is authored, + and provides flexibility for different asset types. For example, + a character assembly (composed of character, clothes, etc) might have + \\em model:applyDrawMode set at the top of the subtree so the whole group + can be drawn as a single card object. An effects subtree might have + \\em model:applyDrawMode set at a lower level so each particle + group draws individually. + + Models of kind component are treated as if \\em model:applyDrawMode + were true. This means a prim is drawn with proxy geometry when: the + prim has kind component, and/or \\em model:applyDrawMode is set; and + the prim's resolved value for \\em model:drawMode is not _default_. + + \\section UsdGeomModelAPI_cardGeometry Cards Geometry + + The specific geometry used in cards mode is controlled by the + \\em model:cardGeometry attribute: + - \\em cross - Generate a quad normal to each basis direction and negative. + Locate each quad so that it bisects the model extents. + - \\em box - Generate a quad normal to each basis direction and negative. 
+ Locate each quad on a face of the model extents, facing out. + - \\em fromTexture - Generate a quad for each supplied texture from + attributes stored in that texture's metadata. + + For \\em cross and \\em box mode, the extents are calculated for purposes + \\em default, \\em proxy, and \\em render, at their earliest authored time. + If the model has no textures, all six card faces are rendered using + \\em model:drawModeColor. If one or more textures are present, only axes + with one or more textures assigned are drawn. For each axis, if both + textures (positive and negative) are specified, they'll be used on the + corresponding card faces; if only one texture is specified, it will be + mapped to the opposite card face after being flipped on the texture's + s-axis. Any card faces with invalid asset paths will be drawn with + \\em model:drawModeColor. + + Both \\em model:cardGeometry and \\em model:drawModeColor should be + authored on the prim where the draw mode takes effect, since these + attributes are not inherited. + + For \\em fromTexture mode, only card faces with valid textures assigned + are drawn. The geometry is generated by pulling the \\em worldtoscreen + attribute out of texture metadata. This is expected to be a 4x4 matrix + mapping the model-space position of the card quad to the clip-space quad + with corners (-1,-1,0) and (1,1,0). The card vertices are generated by + transforming the clip-space corners by the inverse of \\em worldtoscreen. + Textures are mapped so that (s) and (t) map to (+x) and (+y) in clip space. + If the metadata cannot be read in the right format, or the matrix can't + be inverted, the card face is not drawn. + + All card faces are drawn and textured as single-sided. + + \\todo CreatePayload() """ +) +{ + uniform token model:drawMode = "inherited" ( + allowedTokens = ["origin", "bounds", "cards", "default", "inherited"] + doc = """Alternate imaging mode; applied to this prim or child prims + where \\em model:applyDrawMode is true, or where the prim + has kind \\em component. See \\ref UsdGeomModelAPI_drawMode + for mode descriptions.""" + ) + uniform bool model:applyDrawMode = false ( + doc = """If true, and the resolved value of \\em model:drawMode is + non-default, apply an alternate imaging mode to this prim. See + \\ref UsdGeomModelAPI_drawMode.""" + ) + uniform float3 model:drawModeColor = (0.18, 0.18, 0.18) ( + doc = """The base color of imaging prims inserted for alternate + imaging modes. For \\em origin and \\em bounds modes, this + controls line color; for \\em cards mode, this controls the + fallback quad color.""" + ) + uniform token model:cardGeometry = "cross" ( + allowedTokens = ["cross", "box", "fromTexture"] + doc = """The geometry to generate for imaging prims inserted for \\em + cards imaging mode. See \\ref UsdGeomModelAPI_cardGeometry for + geometry descriptions.""" + ) + asset model:cardTextureXPos ( + doc = """In \\em cards imaging mode, the texture applied to the X+ quad. + The texture axes (s,t) are mapped to model-space axes (-y, -z).""" + ) + asset model:cardTextureYPos ( + doc = """In \\em cards imaging mode, the texture applied to the Y+ quad. + The texture axes (s,t) are mapped to model-space axes (x, -z).""" + ) + asset model:cardTextureZPos ( + doc = """In \\em cards imaging mode, the texture applied to the Z+ quad. + The texture axes (s,t) are mapped to model-space axes (x, -y).""" + ) + asset model:cardTextureXNeg ( + doc = """In \\em cards imaging mode, the texture applied to the X- quad. 
+ The texture axes (s,t) are mapped to model-space axes (y, -z).""" + ) + asset model:cardTextureYNeg ( + doc = """In \\em cards imaging mode, the texture applied to the Y- quad. + The texture axes (s,t) are mapped to model-space axes (-x, -z).""" + ) + asset model:cardTextureZNeg ( + doc = """In \\em cards imaging mode, the texture applied to the Z- quad. + The texture axes (s,t) are mapped to model-space axes (-x, -y).""" + ) +} + +class "MotionAPI" +( + inherits = + doc = """UsdGeomMotionAPI encodes data that can live on any prim that + may affect computations involving: + - computed motion for motion blur + - sampling for motion blur + + The \\ref GetMotionBlurScaleAttr() "motion:blurScale" attribute allows + artists to scale the __amount__ of motion blur to be rendered for parts + of the scene without changing the recorded animation. See + \\ref UsdGeomMotionAPI_blurScale for use and implementation details. + + """ +) +{ + float motion:blurScale = 1.0 ( + doc = """BlurScale is an __inherited__ float attribute that stipulates + the rendered motion blur (as typically specified via UsdGeomCamera's + _shutter:open_ and _shutter:close_ properties) should be scaled for + __all objects__ at and beneath the prim in namespace on which the + _motion:blurScale_ value is specified. + + Without changing any other data in the scene, _blurScale_ allows artists to + "dial in" the amount of blur on a per-object basis. A _blurScale_ + value of zero removes all blur, a value of 0.5 reduces blur by half, + and a value of 2.0 doubles the blur. The legal range for _blurScale_ + is [0, inf), although very high values may result in extremely expensive + renders, and may exceed the capabilities of some renderers. + + Although renderers are free to implement this feature however they see + fit, see \\ref UsdGeomMotionAPI_blurScale for our guidance on implementing + the feature universally and efficiently. + + \\sa ComputeMotionBlurScale() + """ + ) + + float motion:velocityScale = 1.0 ( + customData = { + string apiName = "velocityScale" + } + doc = """\\deprecated + + VelocityScale is an **inherited** float attribute that + velocity-based schemas (e.g. PointBased, PointInstancer) can consume + to compute interpolated positions and orientations by applying + velocity and angularVelocity, which is required for interpolating + between samples when topology is varying over time. Although these + quantities are generally physically computed by a simulator, sometimes + we require more or less motion-blur to achieve the desired look. + VelocityScale allows artists to dial-in, as a post-sim correction, + a scale factor to be applied to the velocity prior to computing + interpolated positions from it.""" + ) + + int motion:nonlinearSampleCount = 3 ( + customData = { + string apiName = "nonlinearSampleCount" + } + doc = """Determines the number of position or transformation samples + created when motion is described by attributes contributing non-linear + terms. + + To give an example, imagine an application (such as a + renderer) consuming 'points' and the USD document also + contains 'accelerations' for the same prim. Unless the + application can consume these 'accelerations' itself, an + intermediate layer has to compute samples within the sampling + interval for the point positions based on the value of + 'points', 'velocities' and 'accelerations'. The number of these + samples is given by 'nonlinearSampleCount'. The samples are + equally spaced within the sampling interval. 
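
As a rough sketch of that sample computation (illustrative Python only, not a USD API; the interval arguments, helper name, and the 24.0 fallback are assumptions of this sketch), positions for each equally spaced sample time can be derived from the bracketing 'points', 'velocities' and 'accelerations' values:

    def nonlinear_position_samples(points, velocities, accelerations,
                                   startOffset, endOffset,
                                   nonlinearSampleCount=3,
                                   timeCodesPerSecond=24.0):
        # startOffset/endOffset: sampling interval bounds, expressed as UsdTimeCode
        # offsets from the left-bracketing authored time sample (an assumption of
        # this sketch). Velocities are units/second and accelerations units/second
        # squared, so offsets are converted to seconds before applying
        # p + v*t + 0.5*a*t^2 per point.
        n = max(nonlinearSampleCount, 2)
        samples = []
        for k in range(n):
            t = (startOffset + (endOffset - startOffset) * k / (n - 1)) / timeCodesPerSecond
            samples.append([
                tuple(p[i] + velocities[j][i] * t + 0.5 * accelerations[j][i] * t * t
                      for i in range(3))
                for j, p in enumerate(points)
            ])
        return samples
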
+ + Another example involves the PointInstancer where + 'nonlinearSampleCount' is relevant when 'angularVelocities' + or 'accelerations' are authored. + + 'nonlinearSampleCount' is an **inherited** attribute, also + see ComputeNonlinearSampleCount()""" + ) +} + +class "XformCommonAPI" +( + inherits = + doc = """This class provides API for authoring and retrieving a standard set + of component transformations which include a scale, a rotation, a + scale-rotate pivot and a translation. The goal of the API is to enhance + component-wise interchange. It achieves this by limiting the set of allowed + basic ops and by specifying the order in which they are applied. In addition + to the basic set of ops, the 'resetXformStack' bit can also be set to + indicate whether the underlying xformable resets the parent transformation + (i.e. does not inherit it's parent's transformation). + + \\sa UsdGeomXformCommonAPI::GetResetXformStack() + \\sa UsdGeomXformCommonAPI::SetResetXformStack() + + The operator-bool for the class will inform you whether an existing + xformable is compatible with this API. + + The scale-rotate pivot is represented by a pair of (translate, + inverse-translate) xformOps around the scale and rotate operations. + The rotation operation can be any of the six allowed Euler angle sets. + \\sa UsdGeomXformOp::Type. + + The xformOpOrder of an xformable that has all of the supported basic ops + is as follows: + ["xformOp:translate", "xformOp:translate:pivot", "xformOp:rotateXYZ", + "xformOp:scale", "!invert!xformOp:translate:pivot"]. + + It is worth noting that all of the ops are optional. For example, an + xformable may have only a translate or a rotate. It would still be + considered as compatible with this API. Individual SetTranslate(), + SetRotate(), SetScale() and SetPivot() methods are provided by this API + to allow such sparse authoring.""" + customData = { + string apiSchemaType = "nonApplied" + string extraIncludes = """ +#include "pxr/usd/usdGeom/xformable.h" +#include "pxr/usd/usdGeom/xformOp.h" """ + dictionary schemaTokens = { + dictionary pivot = { + string doc = """Op suffix for the standard scale-rotate pivot + on a UsdGeomXformCommonAPI-compatible prim. + """ + } + } + } +) +{ +} + +class HermiteCurves "HermiteCurves" ( + inherits = + doc = """This schema specifies a cubic hermite interpolated curve batch as + sometimes used for defining guides for animation. While hermite curves can + be useful because they interpolate through their control points, they are + not well supported by high-end renderers for imaging. Therefore, while we + include this schema for interchange, we strongly recommend the use of + UsdGeomBasisCurves as the representation of curves intended to be rendered + (ie. hair or grass). Hermite curves can be converted to a Bezier + representation (though not from Bezier back to Hermite in general). + + \\section UsdGeomHermiteCurves_Interpolation Point Interpolation + + The initial cubic curve segment is defined by the first two points and + first two tangents. Additional segments are defined by additional + point / tangent pairs. The number of segments for each non-batched hermite + curve would be len(curve.points) - 1. The total number of segments + for the batched UsdGeomHermiteCurves representation is + len(points) - len(curveVertexCounts). + + \\section UsdGeomHermiteCurves_Primvars Primvar, Width, and Normal Interpolation + + Primvar interpolation is not well specified for this type as it is not + intended as a rendering representation. 
We suggest that per point + primvars would be linearly interpolated across each segment and should + be tagged as 'varying'. + + It is not immediately clear how to specify cubic or 'vertex' interpolation + for this type, as we lack a specification for primvar tangents. This + also means that width and normal interpolation should be restricted to + varying (linear), uniform (per curve element), or constant (per prim). + """ +) { + vector3f[] tangents = [] ( + doc = """Defines the outgoing trajectory tangent for each point. + Tangents should be the same size as the points attribute.""") +} diff --git a/blender/lib/usd/usdHydra/resources/generatedSchema.usda b/blender/lib/usd/usdHydra/resources/generatedSchema.usda new file mode 100644 index 0000000..48f07cb --- /dev/null +++ b/blender/lib/usd/usdHydra/resources/generatedSchema.usda @@ -0,0 +1,29 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class "HydraGenerativeProceduralAPI" ( + doc = """ + This API extends and configures the core UsdProcGenerativeProcedural schema + defined within usdProc for use with hydra generative procedurals as defined + within hdGp. + """ +) +{ + token primvars:hdGp:proceduralType ( + doc = """The registered name of a HdGpGenerativeProceduralPlugin to + be executed.""" + ) + token proceduralSystem = "hydraGenerativeProcedural" ( + doc = ''' + This value should correspond to a configured instance of + HdGpGenerativeProceduralResolvingSceneIndex which will evaluate the + procedural. The default value of "hydraGenerativeProcedural" matches + the equivalent default of HdGpGenerativeProceduralResolvingSceneIndex. + Multiple instances of the scene index can be used to determine where + within a scene index chain a given procedural will be evaluated. + ''' + ) +} + diff --git a/blender/lib/usd/usdHydra/resources/plugInfo.json b/blender/lib/usd/usdHydra/resources/plugInfo.json new file mode 100644 index 0000000..a898e76 --- /dev/null +++ b/blender/lib/usd/usdHydra/resources/plugInfo.json @@ -0,0 +1,37 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdHydraDiscoveryPlugin": { + "bases": [ + "NdrDiscoveryPlugin" + ], + "displayName": "Discovery plugin for deprecated hydra shaders." + }, + "UsdHydraGenerativeProceduralAPI": { + "alias": { + "UsdSchemaBase": "HydraGenerativeProceduralAPI" + }, + "apiSchemaCanOnlyApplyTo": [ + "GenerativeProcedural" + ], + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + } + } + }, + "LibraryPath": "", + "Name": "usdHydra", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdHydra/resources/shaders/empty.glslfx b/blender/lib/usd/usdHydra/resources/shaders/empty.glslfx new file mode 100644 index 0000000..ffe586e --- /dev/null +++ b/blender/lib/usd/usdHydra/resources/shaders/empty.glslfx @@ -0,0 +1,35 @@ +-- glslfx version 0.1 + +// +// Copyright 2018 Pixar +// +// Licensed under the Apache License, Version 2.0 (the "Apache License") +// with the following modification; you may not use this file except in +// compliance with the Apache License and the following modification to it: +// Section 6. Trademarks. is deleted and replaced with: +// +// 6. Trademarks. 
This License does not grant permission to use the trade +// names, trademarks, service marks, or product names of the Licensor +// and its affiliates, except as required to comply with Section 4(c) of +// the License and to reproduce the content of the NOTICE file. +// +// You may obtain a copy of the Apache License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the Apache License with the above modification is +// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the Apache License for the specific +// language governing permissions and limitations under the Apache License. +// + +-- configuration +{ + "techniques": { + "default": { + } + } +} + +-- This file is intentionally empty. diff --git a/blender/lib/usd/usdHydra/resources/shaders/shaderDefs.usda b/blender/lib/usd/usdHydra/resources/shaders/shaderDefs.usda new file mode 100644 index 0000000..1d8d074 --- /dev/null +++ b/blender/lib/usd/usdHydra/resources/shaders/shaderDefs.usda @@ -0,0 +1,252 @@ +#usda 1.0 + +def Shader "HwPtexTexture_1" ( + sdrMetadata = { + token role = "texture" + token isPtex = "1" + } +) +{ + uniform token info:id = "HwPtexTexture_1" + uniform token info:implementationSource = "sourceAsset" + + # Add a dummy sourceAsset attribute with sourceType="glslfx", so that + # an entry gets created for this deprecated shader in the registry. + uniform asset info:glslfx:sourceAsset = @./empty.glslfx@ + + token inputs:faceIndexPrimvar = "ptexFaceIndex" ( + sdrMetadata = { + token primvarProperty = "1" + } + ) + + token inputs:faceOffsetPrimvar = "ptexFaceOffset" ( + sdrMetadata = { + token primvarProperty = "1" + } + ) + + asset inputs:file = @@ ( + doc = "Asset path to the file containg the image data." + ) + + float inputs:frame ( + doc = "The frame offset for animated textures." + ) + + float inputs:textureMemory ( + doc = """Amount of memory used to store the texture (in Mb). A value of + zero specifies the native resolution.""" + ) + + int inputs:faceIndex ( + doc = """The Ptex face index from which to sample. By default, this + parameter will be bound to the ptexFaceIndex primvar.""" + ) + + int inputs:faceOffset ( + doc = """The Ptex face offset to be applied to the face index. By + default, this parameter will be bound to the ptexFaceOffset primvar.""" + ) + + float outputs:r ( + doc = "Outputs the red channel." + sdrMetadata = { + token swizzle = "x" + } + ) + + float outputs:g ( + doc = "Outputs the green channel." + sdrMetadata = { + token swizzle = "y" + } + ) + + float outputs:b ( + doc = "Outputs the blue channnel." + sdrMetadata = { + token swizzle = "z" + } + ) + + float outputs:a ( + doc = "Outputs the alpha channnel." + sdrMetadata = { + token swizzle = "w" + } + ) + + float3 outputs:rgb ( + doc = "Outputs the red, green and blue channels." + sdrMetadata = { + token swizzle = "xyz" + } + ) + + float4 outputs:rgba ( + doc = "Outputs all 4 channels (red, green, blue and alpha)." + sdrMetadata = { + token swizzle = "xyzw" + } + ) +} + +def Shader "HwUvTexture_1" ( + sdrMetadata = { + token role = "texture" + token primvars = "uv" + } +) +{ + uniform token info:id = "HwUvTexture_1" + uniform token info:implementationSource = "sourceAsset" + + # Add a dummy sourceAsset attribute with sourceType="glslfx", so that + # an entry gets created for this deprecated shader in the registry. 
+ uniform asset info:glslfx:sourceAsset = @./empty.glslfx@ + + asset inputs:file = @@ ( + doc = "Asset path to the file containg the image data." + ) + + float inputs:frame ( + doc = "The frame offset for animated textures." + ) + + float inputs:textureMemory ( + doc = """Amount of memory used to store the texture (in Mb). A value of + zero specifies the native resolution.""" + ) + + float2 inputs:uv ( + doc = "The uv coordinates at which to sample the texture." + ) + + token inputs:wrapS ( + allowedTokens = ["clamp", "repeat", "mirror", "black"] + doc = "Specifies the wrap mode for this texture." + ) + + token inputs:wrapT ( + allowedTokens = ["clamp", "repeat", "mirror", "black"] + doc = "Specifies the wrap mode for this texture." + ) + + token inputs:minFilter ( + allowedTokens = ["nearest", "linear", + "linearMipmapLinear", "linearMipmapNearest", + "nearestMipmapLinear", "nearestMipmapNearest" ] + doc = "Specifies the minification filter mode for this texture." + ) + + token inputs:magFilter ( + allowedTokens = ["nearest", "linear"] + doc = "Specifies the magnification filter mode for this texture." + ) + + float4 inputs:fallback = (0.0, 0.0, 0.0, 1.0) ( + doc = """Fallback value to be used when no texture is connected.""" + sdrMetadata = { + token defaultInput = "1" + } + ) +} + +def Shader "HwFieldReader_1" ( + sdrMetadata = { + token role = "field" + } + doc = """Samples from a usdVol FieldAsset.""" +) +{ + uniform asset info:glslfx:sourceAsset = @./empty.glslfx@ + uniform token info:implementationSource = "sourceAsset" + token inputs:fieldname = "" ( + connectability = "interfaceOnly" + doc = """Name of the field to be fetched from volume + (e.g., FOO if the relationship from the volume to field is called field::FOO).""" + sdrMetadata = { + token fieldProperty = "1" + } + ) +} + +def Shader "HwFieldReader_float" ( + inherits = +) +{ + uniform token info:id = "HwFieldReader_float" + float inputs:fallback = 0 ( + doc = "Fallback value to be returned when fetch failed." + sdrMetadata = { + token defaultInput = "1" + } + ) + float outputs:result ( + sdrMetadata = { + token swizzle = "x" + } + ) +} + +def Shader "HwFieldReader_float2" ( + inherits = +) +{ + uniform token info:id = "HwFieldReader_float2" + float2 inputs:fallback = (0, 0) ( + doc = "Fallback value to be returned when fetch failed." + sdrMetadata = { + token defaultInput = "1" + } + ) + float2 outputs:result ( + sdrMetadata = { + token swizzle = "xy" + } + ) +} + +def Shader "HwFieldReader_float3" ( + inherits = +) +{ + uniform token info:id = "HwFieldReader_float3" + float3 inputs:fallback = (0, 0, 0) ( + doc = "Fallback value to be returned when fetch failed." + sdrMetadata = { + token defaultInput = "1" + } + ) + float3 outputs:result ( + sdrMetadata = { + token swizzle = "xyz" + } + ) +} + +def Shader "HwPrimvar_1" ( + sdrMetadata = { + token role = "primvar" + } +) +{ + uniform token info:id = "HwPrimvar_1" + uniform token info:implementationSource = "sourceAsset" + + # Add a dummy sourceAsset attribute with sourceType="glslfx", so that + # an entry gets created for this deprecated shader in the registry. + uniform asset info:glslfx:sourceAsset = @./empty.glslfx@ + + token inputs:varname = "" ( + sdrMetadata = { + bool primvarProperty = 1 + } + doc = """The name of the primvar. Note that on the gprim, this primvar + must follow the standard UsdGeom primvar declaration. + Further note that this name should not contain the UsdGeom primvar + namespace prefix. 
+ """ + ) +} diff --git a/blender/lib/usd/usdHydra/resources/usdHydra/schema.usda b/blender/lib/usd/usdHydra/resources/usdHydra/schema.usda new file mode 100644 index 0000000..dcb5162 --- /dev/null +++ b/blender/lib/usd/usdHydra/resources/usdHydra/schema.usda @@ -0,0 +1,201 @@ +#usda 1.0 +( + "This file describes the Pixar-specific USD Geometric schemata for code generation." + subLayers = [ + @usd/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdHydra" + string libraryPath = "pxr/usd/usdHydra" + dictionary libraryTokens = { + + dictionary HwPrimvar_1 = { + string doc = """The id value of a Primvar shader.""" + } + + dictionary HwPtexTexture_1 = { + string doc = """The id value of a PtexTexture shader.""" + } + + dictionary HwUvTexture_1 = { + string doc = """The id value of a UvTexture shader.""" + } + + dictionary displayLookBxdf = { + string value = "displayLook:bxdf" + string doc = """\deprecated This has been deprecated in favor of + the glslfx:surface output. + + Relationship on a material that targets the "bxdf" or the + surface shader prim.""" + } + + dictionary infoFilename = { + string value = "inputs:file" + string doc = """ The special "info:filename" property of a hydra + Texture shader, which points to a resolvable texture asset.""" + } + + + dictionary infoVarname = { + string value = "inputs:varname" + string doc = """ + """ + } + + dictionary textureMemory = { + string doc = """A shader input on a hydra Texture shader.""" + } + + dictionary frame = { + string doc = """A shader input on a "Texture" shader.""" + } + + dictionary uv = { + string doc = """A shader input on a hydra UvTexture shader.""" + } + + dictionary wrapS = { + string doc = """A shader input on a hydra UvTexture shader which + defines the behavior of texture coordinates that are outside the + bounds of the texture.""" + } + + + dictionary wrapT = { + string doc = """A shader input on a hydra UvTexture shader which + defines the behavior of texture coordinates that are outside the + bounds of the texture.""" + } + + dictionary black = { + string doc = """Possible value for "wrapT" and "wrapS" inputs on + a "UvTexture" shader prim. + Causes black to be returned when sampling outside the bounds of + the texture.""" + } + + dictionary clamp = { + string doc = """Possible value for "wrapT" and "wrapS" inputs on + a "UvTexture" shader prim. + Causes the the texture coordinate to be clamped to [0,1].""" + } + + dictionary mirror = { + string doc = """Possible value for "wrapT" and "wrapS" inputs on + a "UvTexture" shader prim. + Causes the texture coordinate to wrap around like a mirror. -0.2 + becomes 0.2, -1.2 becomes 0.8, etc. ,""" + } + + dictionary repeat = { + string doc = """Possible value for "wrapT" and "wrapS" inputs on + a "UvTexture" shader prim. + Causes the texture coordinate to wrap around the texture. So a + texture coordinate of -0.2 becomes the equivalent of 0.8.""" + } + + dictionary useMetadata = { + string doc = """Possible value for "wrapT" and "wrapS" inputs on + a "UvTexture" shader prim. + Causes the wrap value to be loaded from the texture file instead + of being specified in the prim. 
If the texture file doesn't + support metadata or the metadata doesn't contain a wrap mode, + the "black" wrap mode is used.""" + } + + dictionary magFilter = { + string doc = """An input on a UvTexture shader.""" + } + + dictionary minFilter = { + string doc = """An input on a UvTexture shader.""" + } + + dictionary linearMipmapLinear = { + string doc = """See https://www.opengl.org/wiki/Sampler_Object , + Possible value for the "minFilter" input on a UvTexture shader. + """ + } + + dictionary linearMipmapNearest = { + string doc = """See https://www.opengl.org/wiki/Sampler_Object + Possible value for the "minFilter" input on a UvTexture shader. + """ + } + + dictionary nearestMipmapNearest = { + string doc = """See https://www.opengl.org/wiki/Sampler_Object + Possible value for the "minFilter" input on a UvTexture shader. + """ + } + + dictionary linear = { + string doc = """A weighted linear blend of nearest adjacent + samples. + Possible value for "minFilter" and "magFilter" inputs on a + UvTextureshader.""" + } + + dictionary nearest = { + string doc = """Selects the nearest sample for the given + coordinate + Possible value for "minFilter" and "magFilter" inputs on a + UvTexture shader.""" + } + + dictionary nearestMipmapLinear = { + string doc = """See https://www.opengl.org/wiki/Sampler_Object + Possible value for "minFilter" and "magFilter" inputs on a + UvTexture shader.""" + } + + dictionary faceIndex = { + string doc = """The "faceIndex" shader input on a hydra + "PtexTexture" shader.""" + } + + dictionary faceOffset = { + string doc = """The "faceOffset" shader input on a hydra + "PtexTexture" shader.""" + } + } + } +){ +} + +class "HydraGenerativeProceduralAPI" ( + inherits = + doc = """ + This API extends and configures the core UsdProcGenerativeProcedural schema + defined within usdProc for use with hydra generative procedurals as defined + within hdGp. + """ + customData = { + string className = "GenerativeProceduralAPI" + token[] apiSchemaCanOnlyApplyTo = ["GenerativeProcedural"] + } +){ + token primvars:hdGp:proceduralType ( + doc = """The registered name of a HdGpGenerativeProceduralPlugin to + be executed.""" + + customData = { + string apiName = "proceduralType" + } + ) + + token proceduralSystem = "hydraGenerativeProcedural" ( + doc = """ + This value should correspond to a configured instance of + HdGpGenerativeProceduralResolvingSceneIndex which will evaluate the + procedural. The default value of "hydraGenerativeProcedural" matches + the equivalent default of HdGpGenerativeProceduralResolvingSceneIndex. + Multiple instances of the scene index can be used to determine where + within a scene index chain a given procedural will be evaluated. 
+ """ + ) +} diff --git a/blender/lib/usd/usdImaging/resources/plugInfo.json b/blender/lib/usd/usdImaging/resources/plugInfo.json new file mode 100644 index 0000000..126c4ef --- /dev/null +++ b/blender/lib/usd/usdImaging/resources/plugInfo.json @@ -0,0 +1,315 @@ +{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "faceIndexPrimvar": { + "appliesTo": [ + "attributes" + ], + "default": "ptexFaceIndex", + "documentation": "Specifies an array of face indices used for ptex mapping", + "type": "token" + }, + "faceOffsetPrimvar": { + "appliesTo": [ + "attributes" + ], + "default": "ptexFaceOffset", + "documentation": "Specifies the ptex face index offset for aggregated ptex files", + "type": "token" + }, + "uvPrimvar": { + "appliesTo": [ + "attributes" + ], + "default": "", + "documentation": "Specifies the UV primvar for texture mapping", + "type": "token" + } + }, + "Types": { + "UsdImagingBasisCurvesAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "BasisCurves" + }, + "UsdImagingCameraAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "Camera" + }, + "UsdImagingCapsuleAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Capsule" + }, + "UsdImagingCollectionAPIAdapter" : { + "bases": ["UsdImagingAPISchemaAdapter"], + "isInternal": true, + "apiSchemaName": "CollectionAPI" + }, + "UsdImagingConeAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Cone" + }, + "UsdImagingCoordSysAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "coordSys" + }, + "UsdImagingCoordSysAPIAdapter" : { + "bases": ["UsdImagingAPISchemaAdapter"], + "isInternal": true, + "apiSchemaName": "CoordSysAPI" + }, + "UsdImagingCubeAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Cube" + }, + "UsdImagingCylinderAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Cylinder" + }, + "UsdImagingDrawModeAdapter" : { + "bases": ["UsdImagingPrimAdapter"], + "isInternal": true, + "primTypeName": "__drawModeAdapter" + }, + "UsdImagingHermiteCurvesAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "HermiteCurves" + }, + "UsdImagingMaterialAdapter" : { + "bases": ["UsdImagingPrimAdapter"], + "isInternal": true, + "primTypeName": "Material" + }, + + "UsdImagingShaderAdapter" : { + "bases": ["UsdImagingRepresentedByAncestorPrimAdapter"], + "isInternal": true, + "primTypeName": "Shader" + }, + + "UsdImagingMaterialBindingAPIAdapter" : { + "bases": ["UsdImagingAPISchemaAdapter"], + "isInternal": true, + "apiSchemaName": "MaterialBindingAPI" + }, + "UsdImagingMeshAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Mesh" + }, + "UsdImagingNurbsCurvesAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "NurbsCurves" + }, + "UsdImagingNurbsPatchAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "NurbsPatch" + }, + "UsdImagingPlaneAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Plane" + }, + "UsdImagingPointsAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Points" + }, + "UsdImagingPointInstancerAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + 
"isInternal": true, + "primTypeName": "PointInstancer" + }, + "UsdImagingSphereAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Sphere" + }, + "UsdImagingRenderSettingsAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "RenderSettings" + }, + "UsdImagingRenderProductAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "RenderProduct" + }, + "UsdImagingRenderVarAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "RenderVar" + }, + "UsdImagingSampleFilterAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "PxrSampleFilterPluginBase", + "includeDerivedPrimTypes" : true + }, + "UsdImagingDisplayFilterAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "PxrDisplayFilterPluginBase", + "includeDerivedPrimTypes": true + }, + "UsdImagingVolumeAdapter": { + "bases": [ + "UsdImagingGprimAdapter" + ], + "isInternal": true, + "primTypeName": "Volume" + }, + "UsdImagingLightAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "LightAPI", + "includeDerivedPrimTypes" : true + }, + "UsdImagingLightAPIAdapter": { + "bases": [ + "UsdImagingAPISchemaAdapter" + ], + "isInternal": true, + "apiSchemaName": "LightAPI" + }, + "UsdImagingLightFilterAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "LightFilter", + "includeDerivedPrimTypes" : true + }, + "UsdImagingDomeLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "DomeLight" + }, + "UsdImagingRectLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "RectLight" + }, + "UsdImagingSphereLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "SphereLight" + }, + "UsdImagingCylinderLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "CylinderLight" + }, + "UsdImagingDiskLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "DiskLight" + }, + "UsdImagingDistantLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "DistantLight" + }, + "UsdImagingPluginLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "PluginLight" + }, + "UsdImagingGeometryLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "GeometryLight" + }, + "UsdImagingPortalLightAdapter": { + "bases": [ + "UsdImagingLightAdapter" + ], + "isInternal": true, + "primTypeName": "PortalLight" + }, + "UsdImagingPluginLightFilterAdapter": { + "bases": [ + "UsdImagingLightFilterAdapter" + ], + "isInternal": true, + "primTypeName": "PluginLightFilter" + } + } + }, + "LibraryPath": "", + "Name": "usdImaging", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdImagingGL/resources/plugInfo.json b/blender/lib/usd/usdImagingGL/resources/plugInfo.json new file mode 100644 index 0000000..e20cc5f --- /dev/null +++ b/blender/lib/usd/usdImagingGL/resources/plugInfo.json @@ -0,0 +1,12 @@ +{ + "Plugins": [ + { + "Info" : {}, + "LibraryPath": "", + "Name": "usdImagingGL", + "ResourcePath": "resources", + "Root": "..", + "Type": 
"library" + } + ] +} diff --git a/blender/lib/usd/usdLux/resources/generatedSchema.usda b/blender/lib/usd/usdLux/resources/generatedSchema.usda new file mode 100644 index 0000000..edb2643 --- /dev/null +++ b/blender/lib/usd/usdLux/resources/generatedSchema.usda @@ -0,0 +1,1535 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class "LightAPI" ( + apiSchemas = ["CollectionAPI:lightLink", "CollectionAPI:shadowLink"] + customData = { + token[] apiSchemaOverridePropertyNames = ["collection:lightLink:includeRoot", "collection:shadowLink:includeRoot"] + } + doc = """API schema that imparts the quality of being a light onto a prim. + + A light is any prim that has this schema applied to it. This is true + regardless of whether LightAPI is included as a built-in API of the prim + type (e.g. RectLight or DistantLight) or is applied directly to a Gprim + that should be treated as a light. + + Linking + + Lights can be linked to geometry. Linking controls which geometry + a light illuminates, and which geometry casts shadows from the light. + + Linking is specified as collections (UsdCollectionAPI) which can + be accessed via GetLightLinkCollection() and GetShadowLinkCollection(). + Note that these collections have their includeRoot set to true, + so that lights will illuminate and cast shadows from all objects + by default. To illuminate only a specific set of objects, there + are two options. One option is to modify the collection paths + to explicitly exclude everything else, assuming it is known; + the other option is to set includeRoot to false and explicitly + include the desired objects. These are complementary approaches + that may each be preferable depending on the scenario and how + to best express the intent of the light setup. + """ +) +{ + uniform bool collection:lightLink:includeRoot = 1 + uniform bool collection:shadowLink:includeRoot = 1 + color3f inputs:color = (1, 1, 1) ( + displayGroup = "Basic" + displayName = "Color" + doc = "The color of emitted light, in energy-linear terms." + ) + float inputs:colorTemperature = 6500 ( + displayGroup = "Basic" + displayName = "Color Temperature" + doc = """Color temperature, in degrees Kelvin, representing the + white point. The default is a common white point, D65. Lower + values are warmer and higher values are cooler. The valid range + is from 1000 to 10000. Only takes effect when + enableColorTemperature is set to true. When active, the + computed result multiplies against the color attribute. + See UsdLuxBlackbodyTemperatureAsRgb().""" + ) + float inputs:diffuse = 1 ( + displayGroup = "Refine" + displayName = "Diffuse Multiplier" + doc = """A multiplier for the effect of this light on the diffuse + response of materials. This is a non-physical control.""" + ) + bool inputs:enableColorTemperature = 0 ( + displayGroup = "Basic" + displayName = "Enable Color Temperature" + doc = "Enables using colorTemperature." + ) + float inputs:exposure = 0 ( + displayGroup = "Basic" + displayName = "Exposure" + doc = """Scales the power of the light exponentially as a power + of 2 (similar to an F-stop control over exposure). The result + is multiplied against the intensity.""" + ) + float inputs:intensity = 1 ( + displayGroup = "Basic" + displayName = "Intensity" + doc = "Scales the power of the light linearly." + ) + bool inputs:normalize = 0 ( + displayGroup = "Advanced" + displayName = "Normalize Power" + doc = """Normalizes power by the surface area of the light. 
+ This makes it easier to independently adjust the power and shape + of the light, by causing the power to not vary with the area or + angular size of the light.""" + ) + float inputs:specular = 1 ( + displayGroup = "Refine" + displayName = "Specular Multiplier" + doc = """A multiplier for the effect of this light on the specular + response of materials. This is a non-physical control.""" + ) + rel light:filters ( + doc = "Relationship to the light filters that apply to this light." + ) + uniform token light:materialSyncMode = "noMaterialResponse" ( + allowedTokens = ["materialGlowTintsLight", "independent", "noMaterialResponse"] + displayGroup = "Geometry" + displayName = "Material Sync Mode" + doc = """For a LightAPI applied to geometry that has a bound Material, + which is entirely or partly emissive, this specifies the relationship + of the Material response to the lighting response. + Valid values are: + - materialGlowTintsLight: All primary and secondary rays see the + emissive/glow response as dictated by the bound Material while the + base color seen by light rays (which is then modulated by all of the + other LightAPI controls) is the multiplication of the color feeding + the emission/glow input of the Material (i.e. its surface or volume + shader) with the scalar or pattern input to *inputs:color*. + This allows the light's color to tint the geometry's glow color while + preserving access to intensity and other light controls as ways to + further modulate the illumination. + - independent: All primary and secondary rays see the emissive/glow + response as dictated by the bound Material, while the base color seen + by light rays is determined solely by *inputs:color*. Note that for + partially emissive geometry (in which some parts are reflective + rather than emissive), a suitable pattern must be connected to the + light's color input, or else the light will radiate uniformly from + the geometry. + - noMaterialResponse: The geometry behaves as if there is no Material + bound at all, i.e. there is no diffuse, specular, or transmissive + response. The base color of light rays is entirely controlled by the + *inputs:color*. This is the standard mode for \"canonical\" lights in + UsdLux and indicates to renderers that a Material will either never + be bound or can always be ignored. + """ + ) + uniform token light:shaderId = "" ( + displayGroup = "Internal" + doc = """Default ID for the light's shader. + This defines the shader ID for this light when a render context specific + shader ID is not available. + + The default shaderId for the intrinsic UsdLux lights (RectLight, + DistantLight, etc.) are set to default to the light's type name. For + each intrinsic UsdLux light, we will always register an SdrShaderNode in + the SdrRegistry, with the identifier matching the type name and the + source type \"USD\", that corresponds to the light's inputs. + \\see GetShaderId + \\see GetShaderIdAttrForRenderContext + \\see SdrRegistry::GetShaderNodeByIdentifier + \\see SdrRegistry::GetShaderNodeByIdentifierAndType + """ + ) +} + +class "MeshLightAPI" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:materialSyncMode", "light:shaderId"] + } + doc = '''This is the preferred API schema to apply to + "Mesh" type prims when adding light behaviors to a mesh. 
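A minimal sketch of how this schema might be applied through USD's Python bindings (pxr), assuming a build recent enough to ship UsdLux.MeshLightAPI; the stage and prim paths below are illustrative only:

from pxr import Usd, UsdGeom, UsdLux

stage = Usd.Stage.CreateInMemory()
mesh = UsdGeom.Mesh.Define(stage, "/World/EmissiveMesh")   # illustrative path
UsdLux.MeshLightAPI.Apply(mesh.GetPrim())                  # the mesh is now treated as a light
light = UsdLux.LightAPI(mesh.GetPrim())                    # LightAPI inputs become available
light.CreateIntensityAttr(5.0)
light.CreateExposureAttr(1.0)   # effective power scales with intensity * 2**exposure
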
+ At its base, this API schema has the built-in behavior of applying LightAPI + to the mesh and overriding the default materialSyncMode to allow the + emission/glow of the bound material to affect the color of the light. + But, it additionally serves as a hook for plugins to attach additional + properties to "mesh lights" through the creation of API schemas which are + authored to auto-apply to MeshLightAPI. + \\see \\ref Usd_AutoAppliedAPISchemas + ''' +) +{ + uniform token light:materialSyncMode = "materialGlowTintsLight" + uniform token light:shaderId = "MeshLight" +} + +class "VolumeLightAPI" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:materialSyncMode", "light:shaderId"] + } + doc = '''This is the preferred API schema to apply to + "Volume" type prims when adding light behaviors to a + volume. At its base, this API schema has the built-in behavior of applying + LightAPI to the volume and overriding the default materialSyncMode to allow + the emission/glow of the bound material to affect the color of the light. + But, it additionally serves as a hook for plugins to attach additional + properties to "volume lights" through the creation of API schemas which are + authored to auto-apply to VolumeLightAPI. + \\see \\ref Usd_AutoAppliedAPISchemas + ''' +) +{ + uniform token light:materialSyncMode = "materialGlowTintsLight" + uniform token light:shaderId = "VolumeLight" +} + +class "LightListAPI" ( + doc = '''API schema to support discovery and publishing of lights in a scene. + + Discovering Lights via Traversal + + To motivate this API, consider what is required to discover all + lights in a scene. We must load all payloads and traverse all prims: + + \\code + 01 // Load everything on the stage so we can find all lights, + 02 // including those inside payloads + 03 stage->Load(); + 04 + 05 // Traverse all prims, checking if they have an applied UsdLuxLightAPI + 06 // (Note: ignoring instancing and a few other things for simplicity) + 07 SdfPathVector lights; + 08 for (UsdPrim prim: stage->Traverse()) { + 09 if (prim.HasAPI()) { + 10 lights.push_back(i->GetPath()); + 11 } + 12 } + \\endcode + + This traversal -- suitably elaborated to handle certain details -- + is the first and simplest thing UsdLuxLightListAPI provides. + UsdLuxLightListAPI::ComputeLightList() performs this traversal and returns + all lights in the scene: + + \\code + 01 UsdLuxLightListAPI listAPI(stage->GetPseudoRoot()); + 02 SdfPathVector lights = listAPI.ComputeLightList(); + \\endcode + + Publishing a Cached Light List + + Consider a USD client that needs to quickly discover lights but + wants to defer loading payloads and traversing the entire scene + where possible, and is willing to do up-front computation and + caching to achieve that. + + UsdLuxLightListAPI provides a way to cache the computed light list, + by publishing the list of lights onto prims in the model + hierarchy. Consider a big set that contains lights: + + \\code + 01 def Xform "BigSetWithLights" ( + 02 kind = "assembly" + 03 payload = @BigSetWithLights.usd@ // Heavy payload + 04 ) { + 05 // Pre-computed, cached list of lights inside payload + 06 rel lightList = [ + 07 <./Lights/light_1>, + 08 <./Lights/light_2>, + 09 ... + 10 ] + 11 token lightList:cacheBehavior = "consumeAndContinue"; + 12 } + \\endcode + + The lightList relationship encodes a set of lights, and the + lightList:cacheBehavior property provides fine-grained + control over how to use that cache. (See details below.) 
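The compute-then-publish workflow described in the next paragraphs might look roughly like this through the Python bindings; the layer name and prim paths are hypothetical:

from pxr import Usd, UsdLux

stage = Usd.Stage.Open("BigSetWithLights.usd")             # hypothetical layer; Open() loads payloads by default
list_api = UsdLux.LightListAPI(stage.GetPseudoRoot())

# Full traversal that ignores any previously published caches.
lights = list_api.ComputeLightList(UsdLux.LightListAPI.ComputeModeIgnoreCache)

# Publish the cache onto a model-hierarchy prim (not the pseudo-root);
# StoreLightList also authors lightList:cacheBehavior = "consumeAndContinue".
asm_prim = stage.GetPrimAtPath("/BigSetWithLights")        # hypothetical assembly prim
UsdLux.LightListAPI(asm_prim).StoreLightList(lights)
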
+ + The cache can be created by first invoking + ComputeLightList(ComputeModeIgnoreCache) to pre-compute the list + and then storing the result with UsdLuxLightListAPI::StoreLightList(). + + To enable efficient retrieval of the cache, it should be stored + on a model hierarchy prim. Furthermore, note that while you can + use a UsdLuxLightListAPI bound to the pseudo-root prim to query the + lights (as in the example above) because it will perform a + traversal over descendants, you cannot store the cache back to the + pseduo-root prim. + + To consult the cached list, we invoke + ComputeLightList(ComputeModeConsultModelHierarchyCache): + + \\code + 01 // Find and load all lights, using lightList cache where available + 02 UsdLuxLightListAPI list(stage->GetPseudoRoot()); + 03 SdfPathSet lights = list.ComputeLightList( + 04 UsdLuxLightListAPI::ComputeModeConsultModelHierarchyCache); + 05 stage.LoadAndUnload(lights, SdfPathSet()); + \\endcode + + In this mode, ComputeLightList() will traverse the model + hierarchy, accumulating cached light lists. + + Controlling Cache Behavior + + The lightList:cacheBehavior property gives additional fine-grained + control over cache behavior: + + - The fallback value, "ignore", indicates that the lightList should + be disregarded. This provides a way to invalidate cache entries. + Note that unless "ignore" is specified, a lightList with an empty + list of targets is considered a cache indicating that no lights + are present. + + - The value "consumeAndContinue" indicates that the cache should + be consulted to contribute lights to the scene, and that recursion + should continue down the model hierarchy in case additional lights + are added as descedants. This is the default value established when + StoreLightList() is invoked. This behavior allows the lights within + a large model, such as the BigSetWithLights example above, to be + published outside the payload, while also allowing referencing and + layering to add additional lights over that set. + + - The value "consumeAndHalt" provides a way to terminate recursive + traversal of the scene for light discovery. The cache will be + consulted but no descendant prims will be examined. + + Instancing + + Where instances are present, UsdLuxLightListAPI::ComputeLightList() will + return the instance-unique paths to any lights discovered within + those instances. Lights within a UsdGeomPointInstancer will + not be returned, however, since they cannot be referred to + solely via paths. +''' +) +{ + rel lightList ( + doc = "Relationship to lights in the scene." + ) + token lightList:cacheBehavior ( + allowedTokens = ["consumeAndHalt", "consumeAndContinue", "ignore"] + doc = """Controls how the lightList should be interpreted. + Valid values are: + - consumeAndHalt: The lightList should be consulted, + and if it exists, treated as a final authoritative statement + of any lights that exist at or below this prim, halting + recursive discovery of lights. + - consumeAndContinue: The lightList should be consulted, + but recursive traversal over nameChildren should continue + in case additional lights are added by descendants. + - ignore: The lightList should be entirely ignored. This + provides a simple way to temporarily invalidate an existing + cache. This is the fallback behavior. + """ + ) +} + +class "ListAPI" ( + doc = """ + \\deprecated + Use LightListAPI instead +""" +) +{ + rel lightList ( + doc = "Relationship to lights in the scene." 
+ ) + token lightList:cacheBehavior ( + allowedTokens = ["consumeAndHalt", "consumeAndContinue", "ignore"] + doc = """Controls how the lightList should be interpreted. + Valid values are: + - consumeAndHalt: The lightList should be consulted, + and if it exists, treated as a final authoritative statement + of any lights that exist at or below this prim, halting + recursive discovery of lights. + - consumeAndContinue: The lightList should be consulted, + but recursive traversal over nameChildren should continue + in case additional lights are added by descendants. + - ignore: The lightList should be entirely ignored. This + provides a simple way to temporarily invalidate an existing + cache. This is the fallback behavior. + """ + ) +} + +class "ShapingAPI" ( + doc = "Controls for shaping a light's emission." +) +{ + float inputs:shaping:cone:angle = 90 ( + displayGroup = "Shaping" + displayName = "Cone Angle" + doc = """Angular limit off the primary axis to restrict the + light spread.""" + ) + float inputs:shaping:cone:softness = 0 ( + displayGroup = "Shaping" + displayName = "Cone Softness" + doc = """Controls the cutoff softness for cone angle. + TODO: clarify semantics""" + ) + float inputs:shaping:focus = 0 ( + displayGroup = "Shaping" + displayName = "Emission Focus" + doc = """A control to shape the spread of light. Higher focus + values pull light towards the center and narrow the spread. + Implemented as an off-axis cosine power exponent. + TODO: clarify semantics""" + ) + color3f inputs:shaping:focusTint = (0, 0, 0) ( + displayGroup = "Shaping" + displayName = "Emission Focus Tint" + doc = """Off-axis color tint. This tints the emission in the + falloff region. The default tint is black. + TODO: clarify semantics""" + ) + float inputs:shaping:ies:angleScale = 0 ( + displayGroup = "Shaping" + displayName = "Profile Scale" + doc = """Rescales the angular distribution of the IES profile. + TODO: clarify semantics""" + ) + asset inputs:shaping:ies:file ( + displayGroup = "Shaping" + displayName = "IES Profile" + doc = """An IES (Illumination Engineering Society) light + profile describing the angular distribution of light.""" + ) + bool inputs:shaping:ies:normalize = 0 ( + displayGroup = "Shaping" + displayName = "Profile Normalization" + doc = """Normalizes the IES profile so that it affects the shaping + of the light while preserving the overall energy output.""" + ) +} + +class "ShadowAPI" ( + doc = """Controls to refine a light's shadow behavior. These are + non-physical controls that are valuable for visual lighting work.""" +) +{ + color3f inputs:shadow:color = (0, 0, 0) ( + displayGroup = "Shadows" + displayName = "Shadow Color" + doc = """The color of shadows cast by the light. This is a + non-physical control. The default is to cast black shadows.""" + ) + float inputs:shadow:distance = -1 ( + displayGroup = "Shadows" + displayName = "Shadow Max Distance" + doc = """The maximum distance shadows are cast. The distance is + measured as the distance between the point on the surface and the + occluder. + The default value (-1) indicates no limit. + """ + ) + bool inputs:shadow:enable = 1 ( + displayGroup = "Shadows" + displayName = "Enable Shadows" + doc = "Enables shadows to be cast by this light." + ) + float inputs:shadow:falloff = -1 ( + displayGroup = "Shadows" + displayName = "Shadow Falloff" + doc = """The size of the shadow falloff zone within the shadow max + distance, which can be used to hide the hard cut-off for shadows seen + stretching past the max distance. 
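As a rough sketch, these shadow controls might be authored through the Python bindings as follows (illustrative path and values; the falloff semantics are spelled out just below):

from pxr import Usd, UsdLux, Gf

stage = Usd.Stage.CreateInMemory()
light = UsdLux.SphereLight.Define(stage, "/World/Key")     # illustrative path
shadow = UsdLux.ShadowAPI.Apply(light.GetPrim())
shadow.CreateShadowEnableAttr(True)
shadow.CreateShadowColorAttr(Gf.Vec3f(0.1, 0.0, 0.0))      # non-physical tinted shadows
shadow.CreateShadowDistanceAttr(50.0)                      # max shadow distance
shadow.CreateShadowFalloffAttr(10.0)                       # fade-out zone within that distance
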
The falloff zone is the area that + fades from full shadowing at the beginning of the falloff zone to no + shadowing at the max distance from the occluder. The falloff zone + distance cannot exceed the shadow max distance. A falloff value equal + to or less than zero (with -1 as the default) indicates no falloff. + """ + ) + float inputs:shadow:falloffGamma = 1 ( + displayGroup = "Shadows" + displayName = "Shadow Falloff Gamma" + doc = """A gamma (i.e., exponential) control over shadow strength + with linear distance within the falloff zone. This controls the rate + of the falloff. + This requires the use of shadowDistance and shadowFalloff.""" + ) +} + +class LightFilter "LightFilter" ( + apiSchemas = ["CollectionAPI:filterLink"] + customData = { + token[] apiSchemaOverridePropertyNames = ["collection:filterLink:includeRoot"] + } + doc = """A light filter modifies the effect of a light. + Lights refer to filters via relationships so that filters may be + shared. + + Linking + + Filters can be linked to geometry. Linking controls which geometry + a light-filter affects, when considering the light filters attached + to a light illuminating the geometry. + + Linking is specified as a collection (UsdCollectionAPI) which can + be accessed via GetFilterLinkCollection(). + """ +) +{ + uniform bool collection:filterLink:includeRoot = 1 + uniform token lightFilter:shaderId = "" ( + displayGroup = "Internal" + doc = """Default ID for the light filter's shader. + This defines the shader ID for this light filter when a render context + specific shader ID is not available. + + \\see GetShaderId + \\see GetShaderIdAttrForRenderContext + \\see SdrRegistry::GetShaderNodeByIdentifier + \\see SdrRegistry::GetShaderNodeByIdentifierAndType + """ + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "BoundableLightBase" ( + apiSchemas = ["LightAPI"] + doc = """Base class for intrinsic lights that are boundable. + + The primary purpose of this class is to provide a direct API to the + functions provided by LightAPI for concrete derived light types. + """ +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. 
+ + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "NonboundableLightBase" ( + apiSchemas = ["LightAPI"] + doc = """Base class for intrinsic lights that are not boundable. + + The primary purpose of this class is to provide a direct API to the + functions provided by LightAPI for concrete derived light types. + """ +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. 
+ + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class DistantLight "DistantLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["inputs:intensity", "light:shaderId"] + } + doc = """Light emitted from a distant source along the -Z axis. + Also known as a directional light.""" +) +{ + float inputs:angle = 0.53 ( + displayGroup = "Basic" + displayName = "Angle Extent" + doc = """Angular size of the light in degrees. + As an example, the Sun is approximately 0.53 degrees as seen from Earth. + Higher values broaden the light and therefore soften shadow edges. + """ + ) + float inputs:intensity = 50000 ( + doc = """Scales the emission of the light linearly. + The DistantLight has a high default intensity to approximate the Sun.""" + ) + uniform token light:shaderId = "DistantLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. 
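In practice the op-based API authors this attribute for you; a brief hypothetical sketch with the Python bindings, using an illustrative stage and path:

from pxr import Usd, UsdGeom, UsdLux

stage = Usd.Stage.CreateInMemory()
sun = UsdLux.DistantLight.Define(stage, "/World/Sun")      # illustrative path
sun.CreateAngleAttr(0.53)                                  # roughly the Sun's angular size
xf = UsdGeom.Xformable(sun.GetPrim())
xf.AddRotateXOp().Set(-45.0)   # authors both the rotate op and xformOpOrder
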
+ It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class DiskLight "DiskLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = """Light emitted from one side of a circular disk. + The disk is centered in the XY plane and emits light along the -Z axis.""" +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + float inputs:radius = 0.5 ( + displayGroup = "Geometry" + displayName = "Radius" + doc = "Radius of the disk." + ) + uniform token light:shaderId = "DiskLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class RectLight "RectLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = """Light emitted from one side of a rectangle. + The rectangle is centered in the XY plane and emits light along the -Z axis. + The rectangle is 1 unit in length in the X and Y axis. In the default + position, a texture file's min coordinates should be at (+X, +Y) and + max coordinates at (-X, -Y).""" +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + float inputs:height = 1 ( + displayGroup = "Geometry" + displayName = "Height" + doc = "Height of the rectangle, in the local Y axis." + ) + asset inputs:texture:file ( + displayGroup = "Basic" + displayName = "Color Map" + doc = "A color texture to use on the rectangle." + ) + float inputs:width = 1 ( + displayGroup = "Geometry" + displayName = "Width" + doc = "Width of the rectangle, in the local X axis." + ) + uniform token light:shaderId = "RectLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. 
+ + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class SphereLight "SphereLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = "Light emitted outward from a sphere." +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + float inputs:radius = 0.5 ( + displayGroup = "Geometry" + displayName = "Radius" + doc = "Radius of the sphere." + ) + uniform token light:shaderId = "SphereLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. 
+ + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + bool treatAsPoint = 0 ( + displayGroup = "Advanced" + displayName = "Treat As Point" + doc = """A hint that this light can be treated as a 'point' + light (effectively, a zero-radius sphere) by renderers that + benefit from non-area lighting. Renderers that only support + area lights can disregard this.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class CylinderLight "CylinderLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = """Light emitted outward from a cylinder. + The cylinder is centered at the origin and has its major axis on the X axis. + The cylinder does not emit light from the flat end-caps. + """ +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + float inputs:length = 1 ( + displayGroup = "Geometry" + displayName = "Length" + doc = "Length of the cylinder, in the local X axis." + ) + float inputs:radius = 0.5 ( + displayGroup = "Geometry" + displayName = "Radius" + doc = "Radius of the cylinder." 
+ ) + uniform token light:shaderId = "CylinderLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + bool treatAsLine = 0 ( + displayGroup = "Advanced" + displayName = "Treat As Line" + doc = """A hint that this light can be treated as a 'line' + light (effectively, a zero-radius cylinder) by renderers that + benefit from non-area lighting. Renderers that only support + area lights can disregard this.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class GeometryLight "GeometryLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = """\\deprecated + Light emitted outward from a geometric prim (UsdGeomGprim), + which is typically a mesh.""" +) +{ + rel geometry ( + doc = "Relationship to the geometry to use as the light source." + ) + uniform token light:shaderId = "GeometryLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class DomeLight "DomeLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = """Light emitted inward from a distant external environment, + such as a sky or IBL light probe. The orientation of a dome light with a + latlong texture is expected to match the OpenEXR specification for latlong + environment maps. From the OpenEXR documentation: + + ------------------------------------------------------------------------- + Latitude-Longitude Map: + + The environment is projected onto the image using polar coordinates + (latitude and longitude). A pixel's x coordinate corresponds to + its longitude, and the y coordinate corresponds to its latitude. + Pixel (dataWindow.min.x, dataWindow.min.y) has latitude +pi/2 and + longitude +pi; pixel (dataWindow.max.x, dataWindow.max.y) has + latitude -pi/2 and longitude -pi. + + In 3D space, latitudes -pi/2 and +pi/2 correspond to the negative and + positive y direction. Latitude 0, longitude 0 points into positive + z direction; and latitude 0, longitude pi/2 points into positive x + direction. + + The size of the data window should be 2*N by N pixels (width by height), + where N can be any integer greater than 0. 
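As a plain restatement of that mapping (not part of any USD API), a hypothetical helper converting a pixel of a 2N-by-N latlong image into a direction vector might look like:

import math

def latlong_pixel_to_direction(x, y, width, height):
    # (0, 0) maps to latitude +pi/2, longitude +pi; (width-1, height-1) maps to
    # latitude -pi/2, longitude -pi, matching the description above (pixel centers
    # are ignored here for simplicity).
    lon = math.pi * (1.0 - 2.0 * x / (width - 1))
    lat = (math.pi / 2.0) * (1.0 - 2.0 * y / (height - 1))
    # Latitude +/-pi/2 -> +/-Y; lat 0, lon 0 -> +Z; lat 0, lon +pi/2 -> +X.
    return (math.cos(lat) * math.sin(lon),
            math.sin(lat),
            math.cos(lat) * math.cos(lon))
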
+ ------------------------------------------------------------------------- +""" +) +{ + float guideRadius = 100000 ( + displayGroup = "Guides" + displayName = "Radius" + doc = "The radius of guide geometry to use to visualize the dome light. The default is 1 km for scenes whose metersPerUnit is the USD default of 0.01 (i.e., 1 world unit is 1 cm)." + ) + asset inputs:texture:file ( + displayGroup = "Basic" + displayName = "Color Map" + doc = """A color texture to use on the dome, such as an HDR (high + dynamic range) texture intended for IBL (image based lighting).""" + ) + token inputs:texture:format = "automatic" ( + allowedTokens = ["automatic", "latlong", "mirroredBall", "angular", "cubeMapVerticalCross"] + displayGroup = "Basic" + displayName = "Color Map Format" + doc = """Specifies the parameterization of the color map file. + Valid values are: + - automatic: Tries to determine the layout from the file itself. + For example, Renderman texture files embed an explicit + parameterization. + - latlong: Latitude as X, longitude as Y. + - mirroredBall: An image of the environment reflected in a + sphere, using an implicitly orthogonal projection. + - angular: Similar to mirroredBall but the radial dimension + is mapped linearly to the angle, providing better sampling + at the edges. + - cubeMapVerticalCross: A cube map with faces laid out as a + vertical cross. + """ + ) + uniform token light:shaderId = "DomeLight" + rel portals ( + doc = "Optional portals to guide light sampling." + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
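Editor's note: a minimal sketch of authoring the DomeLight attributes listed above with the pxr Python bindings; the stage, prim path, and texture file name are hypothetical.

```
from pxr import Usd, UsdLux

stage = Usd.Stage.CreateInMemory()
dome = UsdLux.DomeLight.Define(stage, "/World/EnvLight")  # hypothetical path
dome.CreateTextureFileAttr("studio_sky.exr")              # hypothetical HDR file
dome.CreateTextureFormatAttr("latlong")                   # one of the allowedTokens
dome.CreateGuideRadiusAttr(100000)                        # guide size only; no lighting effect
```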
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class PortalLight "PortalLight" ( + apiSchemas = ["LightAPI"] + customData = { + token[] apiSchemaOverridePropertyNames = ["light:shaderId"] + } + doc = """A rectangular portal in the local XY plane that guides sampling + of a dome light. Transmits light in the -Z direction. + The rectangle is 1 unit in length.""" +) +{ + float3[] extent = [(-0.5, -0.5, 0), (0.5, 0.5, 0)] ( + doc = """Boundary extent of the unit rectangle in the XY plane that + defines the portal.""" + ) + uniform token light:shaderId = "PortalLight" + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. 
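Editor's note: a PortalLight only takes effect once a DomeLight targets it through its portals relationship. A sketch of wiring that up, assuming the pxr Python bindings; the paths are hypothetical.

```
from pxr import Usd, UsdLux

stage = Usd.Stage.CreateInMemory()
dome = UsdLux.DomeLight.Define(stage, "/World/EnvLight")
portal = UsdLux.PortalLight.Define(stage, "/World/Window/Portal")  # unit rectangle in local XY

# Point the dome light at the portal so renderers can focus sampling through it.
dome.CreatePortalsRel().AddTarget(portal.GetPrim().GetPath())
```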
+ + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class PluginLight "PluginLight" ( + apiSchemas = ["NodeDefAPI", "LightAPI"] + doc = """Light that provides properties that allow it to identify an + external SdrShadingNode definition, through UsdShadeNodeDefAPI, that can be + provided to render delegates without the need to provide a schema + definition for the light's type. + + \\see \\ref usdLux_PluginSchemas +""" +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class PluginLightFilter "PluginLightFilter" ( + apiSchemas = ["NodeDefAPI", "CollectionAPI:filterLink"] + customData = { + token[] apiSchemaOverridePropertyNames = ["collection:filterLink:includeRoot"] + } + doc = """Light filter that provides properties that allow it to identify an + external SdrShadingNode definition, through UsdShadeNodeDefAPI, that can be + provided to render delegates without the need to provide a schema + definition for the light filter's type. 
+ + \\see \\ref usdLux_PluginSchemas +""" +) +{ + uniform bool collection:filterLink:includeRoot = 1 + uniform token lightFilter:shaderId = "" ( + displayGroup = "Internal" + doc = """Default ID for the light filter's shader. + This defines the shader ID for this light filter when a render context + specific shader ID is not available. + + \\see GetShaderId + \\see GetShaderIdAttrForRenderContext + \\see SdrRegistry::GetShaderNodeByIdentifier + \\see SdrRegistry::GetShaderNodeByIdentifierAndType + """ + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + diff --git a/blender/lib/usd/usdLux/resources/plugInfo.json b/blender/lib/usd/usdLux/resources/plugInfo.json new file mode 100644 index 0000000..bc619a3 --- /dev/null +++ b/blender/lib/usd/usdLux/resources/plugInfo.json @@ -0,0 +1,234 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. 
+{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdLuxBoundableLightBase": { + "alias": { + "UsdSchemaBase": "BoundableLightBase" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomBoundable" + ], + "schemaKind": "abstractTyped" + }, + "UsdLuxCylinderLight": { + "alias": { + "UsdSchemaBase": "CylinderLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxBoundableLightBase" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdLuxDiskLight": { + "alias": { + "UsdSchemaBase": "DiskLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxBoundableLightBase" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdLuxDistantLight": { + "alias": { + "UsdSchemaBase": "DistantLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxNonboundableLightBase" + ], + "schemaKind": "concreteTyped" + }, + "UsdLuxDomeLight": { + "alias": { + "UsdSchemaBase": "DomeLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxNonboundableLightBase" + ], + "schemaKind": "concreteTyped" + }, + "UsdLuxGeometryLight": { + "alias": { + "UsdSchemaBase": "GeometryLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxNonboundableLightBase" + ], + "schemaKind": "concreteTyped" + }, + "UsdLuxLightAPI": { + "alias": { + "UsdSchemaBase": "LightAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "providesUsdShadeConnectableAPIBehavior": true, + "schemaKind": "singleApplyAPI" + }, + "UsdLuxLightFilter": { + "alias": { + "UsdSchemaBase": "LightFilter" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "providesUsdShadeConnectableAPIBehavior": true, + "schemaKind": "concreteTyped" + }, + "UsdLuxLightListAPI": { + "alias": { + "UsdSchemaBase": "LightListAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdLuxListAPI": { + "alias": { + "UsdSchemaBase": "ListAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdLuxMeshLightAPI": { + "alias": { + "UsdSchemaBase": "MeshLightAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdLuxNonboundableLightBase": { + "alias": { + "UsdSchemaBase": "NonboundableLightBase" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "schemaKind": "abstractTyped" + }, + "UsdLuxPluginLight": { + "alias": { + "UsdSchemaBase": "PluginLight" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "schemaKind": "concreteTyped" + }, + "UsdLuxPluginLightFilter": { + "alias": { + "UsdSchemaBase": "PluginLightFilter" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxLightFilter" + ], + "schemaKind": "concreteTyped" + }, + "UsdLuxPortalLight": { + "alias": { + "UsdSchemaBase": "PortalLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxBoundableLightBase" + ], + "schemaKind": "concreteTyped" + }, + "UsdLuxRectLight": { + "alias": { + "UsdSchemaBase": "RectLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxBoundableLightBase" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdLuxShadowAPI": { + "alias": { + "UsdSchemaBase": "ShadowAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdLuxShapingAPI": { + "alias": { + "UsdSchemaBase": "ShapingAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + 
"UsdLuxSphereLight": { + "alias": { + "UsdSchemaBase": "SphereLight" + }, + "autoGenerated": true, + "bases": [ + "UsdLuxBoundableLightBase" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdLuxVolumeLightAPI": { + "alias": { + "UsdSchemaBase": "VolumeLightAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdLux_DiscoveryPlugin": { + "bases": [ + "NdrDiscoveryPlugin" + ] + }, + "UsdLux_LightDefParserPlugin": { + "bases": [ + "NdrParserPlugin" + ] + } + } + }, + "LibraryPath": "", + "Name": "usdLux", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdLux/resources/usdLux/schema.usda b/blender/lib/usd/usdLux/resources/usdLux/schema.usda new file mode 100644 index 0000000..95ecfba --- /dev/null +++ b/blender/lib/usd/usdLux/resources/usdLux/schema.usda @@ -0,0 +1,999 @@ +#usda 1.0 +( + "This file describes the USD Lux light schemata for code generation." + subLayers = [ + @usdGeom/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdLux" + string libraryPath = "pxr/usd/usdLux" + dictionary libraryTokens = { + dictionary lightLink = { + string doc = """ + This token represents the collection name to use + with UsdCollectionAPI to represent light-linking + of a prim with an applied UsdLuxLightAPI. + """ + } + dictionary shadowLink = { + string doc = """ + This token represents the collection name to use + with UsdCollectionAPI to represent shadow-linking + of a prim with an applied UsdLuxLightAPI. + """ + } + dictionary filterLink = { + string doc = """ + This token represents the collection name to use + with UsdCollectionAPI to represent filter-linking + of a UsdLuxLightFilter prim. + """ + } + dictionary orientToStageUpAxis = { + string doc = """ + This token represents the suffix for a UsdGeomXformOp + used to orient a light with the stage's up axis. + """ + } + } + } +) +{ +} + +class "LightAPI" ( + inherits = + doc = """API schema that imparts the quality of being a light onto a prim. + + A light is any prim that has this schema applied to it. This is true + regardless of whether LightAPI is included as a built-in API of the prim + type (e.g. RectLight or DistantLight) or is applied directly to a Gprim + that should be treated as a light. + + Linking + + Lights can be linked to geometry. Linking controls which geometry + a light illuminates, and which geometry casts shadows from the light. + + Linking is specified as collections (UsdCollectionAPI) which can + be accessed via GetLightLinkCollection() and GetShadowLinkCollection(). + Note that these collections have their includeRoot set to true, + so that lights will illuminate and cast shadows from all objects + by default. To illuminate only a specific set of objects, there + are two options. One option is to modify the collection paths + to explicitly exclude everything else, assuming it is known; + the other option is to set includeRoot to false and explicitly + include the desired objects. These are complementary approaches + that may each be preferable depending on the scenario and how + to best express the intent of the light setup. 
+ """ + customData = { + dictionary extraPlugInfo = { + bool providesUsdShadeConnectableAPIBehavior = 1 + } + string extraIncludes = """ +#include "pxr/usd/usd/collectionAPI.h" +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" """ + } + prepend apiSchemas = ["CollectionAPI:lightLink", "CollectionAPI:shadowLink"] +) { + uniform bool collection:lightLink:includeRoot = 1 ( + customData = { + bool apiSchemaOverride = true + } + ) + uniform bool collection:shadowLink:includeRoot = 1 ( + customData = { + bool apiSchemaOverride = true + } + ) + uniform token light:shaderId = "" ( + displayGroup = "Internal" + doc = """Default ID for the light's shader. + This defines the shader ID for this light when a render context specific + shader ID is not available. + + The default shaderId for the intrinsic UsdLux lights (RectLight, + DistantLight, etc.) are set to default to the light's type name. For + each intrinsic UsdLux light, we will always register an SdrShaderNode in + the SdrRegistry, with the identifier matching the type name and the + source type "USD", that corresponds to the light's inputs. + \\see GetShaderId + \\see GetShaderIdAttrForRenderContext + \\see SdrRegistry::GetShaderNodeByIdentifier + \\see SdrRegistry::GetShaderNodeByIdentifierAndType + """ + customData = { + token apiName = "shaderId" + } + ) + uniform token light:materialSyncMode = "noMaterialResponse" ( + displayGroup = "Geometry" + displayName = "Material Sync Mode" + doc = """For a LightAPI applied to geometry that has a bound Material, + which is entirely or partly emissive, this specifies the relationship + of the Material response to the lighting response. + Valid values are: + - materialGlowTintsLight: All primary and secondary rays see the + emissive/glow response as dictated by the bound Material while the + base color seen by light rays (which is then modulated by all of the + other LightAPI controls) is the multiplication of the color feeding + the emission/glow input of the Material (i.e. its surface or volume + shader) with the scalar or pattern input to *inputs:color*. + This allows the light's color to tint the geometry's glow color while + preserving access to intensity and other light controls as ways to + further modulate the illumination. + - independent: All primary and secondary rays see the emissive/glow + response as dictated by the bound Material, while the base color seen + by light rays is determined solely by *inputs:color*. Note that for + partially emissive geometry (in which some parts are reflective + rather than emissive), a suitable pattern must be connected to the + light's color input, or else the light will radiate uniformly from + the geometry. + - noMaterialResponse: The geometry behaves as if there is no Material + bound at all, i.e. there is no diffuse, specular, or transmissive + response. The base color of light rays is entirely controlled by the + *inputs:color*. This is the standard mode for "canonical" lights in + UsdLux and indicates to renderers that a Material will either never + be bound or can always be ignored. 
+ """ + allowedTokens = ["materialGlowTintsLight", + "independent", + "noMaterialResponse"] + customData = { + token apiName = "materialSyncMode" + } + ) + float inputs:intensity = 1 ( + displayGroup = "Basic" + displayName = "Intensity" + doc = """Scales the power of the light linearly.""" + customData = { + token apiName = "intensity" + } + ) + float inputs:exposure = 0 ( + displayGroup = "Basic" + displayName = "Exposure" + doc = """Scales the power of the light exponentially as a power + of 2 (similar to an F-stop control over exposure). The result + is multiplied against the intensity.""" + customData = { + token apiName = "exposure" + } + ) + float inputs:diffuse = 1.0 ( + displayGroup = "Refine" + displayName = "Diffuse Multiplier" + doc = """A multiplier for the effect of this light on the diffuse + response of materials. This is a non-physical control.""" + customData = { + token apiName = "diffuse" + } + ) + float inputs:specular = 1.0 ( + displayGroup = "Refine" + displayName = "Specular Multiplier" + doc = """A multiplier for the effect of this light on the specular + response of materials. This is a non-physical control.""" + customData = { + token apiName = "specular" + } + ) + bool inputs:normalize = false ( + displayGroup = "Advanced" + displayName = "Normalize Power" + doc = """Normalizes power by the surface area of the light. + This makes it easier to independently adjust the power and shape + of the light, by causing the power to not vary with the area or + angular size of the light.""" + customData = { + token apiName = "normalize" + } + ) + color3f inputs:color = (1, 1, 1) ( + displayGroup = "Basic" + displayName = "Color" + doc = """The color of emitted light, in energy-linear terms.""" + customData = { + token apiName = "color" + } + ) + bool inputs:enableColorTemperature = false ( + displayGroup = "Basic" + displayName = "Enable Color Temperature" + doc = """Enables using colorTemperature.""" + customData = { + token apiName = "enableColorTemperature" + } + ) + float inputs:colorTemperature = 6500 ( + displayGroup = "Basic" + displayName = "Color Temperature" + doc = """Color temperature, in degrees Kelvin, representing the + white point. The default is a common white point, D65. Lower + values are warmer and higher values are cooler. The valid range + is from 1000 to 10000. Only takes effect when + enableColorTemperature is set to true. When active, the + computed result multiplies against the color attribute. + See UsdLuxBlackbodyTemperatureAsRgb().""" + customData = { + token apiName = "colorTemperature" + } + ) + rel light:filters ( + doc = """Relationship to the light filters that apply to this light.""" + customData = { + token apiName = "filters" + } + ) +} + +class "MeshLightAPI" ( + inherits = + doc = """This is the preferred API schema to apply to + \\ref UsdGeomMesh "Mesh" type prims when adding light behaviors to a mesh. + At its base, this API schema has the built-in behavior of applying LightAPI + to the mesh and overriding the default materialSyncMode to allow the + emission/glow of the bound material to affect the color of the light. + But, it additionally serves as a hook for plugins to attach additional + properties to "mesh lights" through the creation of API schemas which are + authored to auto-apply to MeshLightAPI. 
+ \\see \\ref Usd_AutoAppliedAPISchemas + """ + prepend apiSchemas = ["LightAPI"] +) { + uniform token light:shaderId = "MeshLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + uniform token light:materialSyncMode = "materialGlowTintsLight" ( + customData = { + bool apiSchemaOverride = true + } + ) +} + +class "VolumeLightAPI" ( + inherits = + doc = """This is the preferred API schema to apply to + \\ref UsdVolVolume "Volume" type prims when adding light behaviors to a + volume. At its base, this API schema has the built-in behavior of applying + LightAPI to the volume and overriding the default materialSyncMode to allow + the emission/glow of the bound material to affect the color of the light. + But, it additionally serves as a hook for plugins to attach additional + properties to "volume lights" through the creation of API schemas which are + authored to auto-apply to VolumeLightAPI. + \\see \\ref Usd_AutoAppliedAPISchemas + """ + prepend apiSchemas = ["LightAPI"] +) { + uniform token light:shaderId = "VolumeLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + uniform token light:materialSyncMode = "materialGlowTintsLight" ( + customData = { + bool apiSchemaOverride = true + } + ) +} + +class "LightListAPI" ( + inherits = + doc = """API schema to support discovery and publishing of lights in a scene. + + \\section UsdLuxLightListAPI_Discovery Discovering Lights via Traversal + + To motivate this API, consider what is required to discover all + lights in a scene. We must load all payloads and traverse all prims: + + \\code + 01 // Load everything on the stage so we can find all lights, + 02 // including those inside payloads + 03 stage->Load(); + 04 + 05 // Traverse all prims, checking if they have an applied UsdLuxLightAPI + 06 // (Note: ignoring instancing and a few other things for simplicity) + 07 SdfPathVector lights; + 08 for (UsdPrim prim: stage->Traverse()) { + 09 if (prim.HasAPI()) { + 10 lights.push_back(i->GetPath()); + 11 } + 12 } + \\endcode + + This traversal -- suitably elaborated to handle certain details -- + is the first and simplest thing UsdLuxLightListAPI provides. + UsdLuxLightListAPI::ComputeLightList() performs this traversal and returns + all lights in the scene: + + \\code + 01 UsdLuxLightListAPI listAPI(stage->GetPseudoRoot()); + 02 SdfPathVector lights = listAPI.ComputeLightList(); + \\endcode + + \\section UsdLuxLightListAPI_LightList Publishing a Cached Light List + + Consider a USD client that needs to quickly discover lights but + wants to defer loading payloads and traversing the entire scene + where possible, and is willing to do up-front computation and + caching to achieve that. + + UsdLuxLightListAPI provides a way to cache the computed light list, + by publishing the list of lights onto prims in the model + hierarchy. Consider a big set that contains lights: + + \\code + 01 def Xform "BigSetWithLights" ( + 02 kind = "assembly" + 03 payload = @BigSetWithLights.usd@ // Heavy payload + 04 ) { + 05 // Pre-computed, cached list of lights inside payload + 06 rel lightList = [ + 07 <./Lights/light_1>, + 08 <./Lights/light_2>, + 09 ... + 10 ] + 11 token lightList:cacheBehavior = "consumeAndContinue"; + 12 } + \\endcode + + The lightList relationship encodes a set of lights, and the + lightList:cacheBehavior property provides fine-grained + control over how to use that cache. (See details below.) 
+ + The cache can be created by first invoking + ComputeLightList(ComputeModeIgnoreCache) to pre-compute the list + and then storing the result with UsdLuxLightListAPI::StoreLightList(). + + To enable efficient retrieval of the cache, it should be stored + on a model hierarchy prim. Furthermore, note that while you can + use a UsdLuxLightListAPI bound to the pseudo-root prim to query the + lights (as in the example above) because it will perform a + traversal over descendants, you cannot store the cache back to the + pseduo-root prim. + + To consult the cached list, we invoke + ComputeLightList(ComputeModeConsultModelHierarchyCache): + + \\code + 01 // Find and load all lights, using lightList cache where available + 02 UsdLuxLightListAPI list(stage->GetPseudoRoot()); + 03 SdfPathSet lights = list.ComputeLightList( + 04 UsdLuxLightListAPI::ComputeModeConsultModelHierarchyCache); + 05 stage.LoadAndUnload(lights, SdfPathSet()); + \\endcode + + In this mode, ComputeLightList() will traverse the model + hierarchy, accumulating cached light lists. + + \\section UsdLuxLightListAPI_CacheBehavior Controlling Cache Behavior + + The lightList:cacheBehavior property gives additional fine-grained + control over cache behavior: + + \\li The fallback value, "ignore", indicates that the lightList should + be disregarded. This provides a way to invalidate cache entries. + Note that unless "ignore" is specified, a lightList with an empty + list of targets is considered a cache indicating that no lights + are present. + + \\li The value "consumeAndContinue" indicates that the cache should + be consulted to contribute lights to the scene, and that recursion + should continue down the model hierarchy in case additional lights + are added as descedants. This is the default value established when + StoreLightList() is invoked. This behavior allows the lights within + a large model, such as the BigSetWithLights example above, to be + published outside the payload, while also allowing referencing and + layering to add additional lights over that set. + + \\li The value "consumeAndHalt" provides a way to terminate recursive + traversal of the scene for light discovery. The cache will be + consulted but no descendant prims will be examined. + + \\section UsdLuxLightListAPI_Instancing Instancing + + Where instances are present, UsdLuxLightListAPI::ComputeLightList() will + return the instance-unique paths to any lights discovered within + those instances. Lights within a UsdGeomPointInstancer will + not be returned, however, since they cannot be referred to + solely via paths. +""" +) { + rel lightList ( + doc = """Relationship to lights in the scene.""" + ) + token lightList:cacheBehavior ( + doc = """Controls how the lightList should be interpreted. + Valid values are: + - consumeAndHalt: The lightList should be consulted, + and if it exists, treated as a final authoritative statement + of any lights that exist at or below this prim, halting + recursive discovery of lights. + - consumeAndContinue: The lightList should be consulted, + but recursive traversal over nameChildren should continue + in case additional lights are added by descendants. + - ignore: The lightList should be entirely ignored. This + provides a simple way to temporarily invalidate an existing + cache. This is the fallback behavior. 
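Editor's note: a Python rendition of the light-list workflow described above, assuming the pxr bindings; the set prim and light paths are hypothetical, and the heavy-payload setup from the example is elided.

```
from pxr import Usd, UsdLux

stage = Usd.Stage.CreateInMemory()
bigSet = stage.DefinePrim("/BigSetWithLights", "Xform")            # hypothetical set prim
Usd.ModelAPI(bigSet).SetKind("assembly")                           # cache lives on model-hierarchy prims
UsdLux.SphereLight.Define(stage, "/BigSetWithLights/Lights/light_1")

# Pre-compute the full list (ignoring any caches), then publish it on the set prim.
setList = UsdLux.LightListAPI.Apply(bigSet)
lights = setList.ComputeLightList(UsdLux.LightListAPI.ComputeModeIgnoreCache)
setList.StoreLightList(lights)

# Later consumers can consult the cache instead of traversing everything.
rootList = UsdLux.LightListAPI(stage.GetPseudoRoot())
cached = rootList.ComputeLightList(
    UsdLux.LightListAPI.ComputeModeConsultModelHierarchyCache)
```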
+ """ + allowedTokens = ["consumeAndHalt", "consumeAndContinue", "ignore"] + ) +} + +class "ListAPI" ( + inherits = + doc = """ + \\deprecated + Use LightListAPI instead +""" +) { + rel lightList ( + doc = """Relationship to lights in the scene.""" + ) + token lightList:cacheBehavior ( + doc = """Controls how the lightList should be interpreted. + Valid values are: + - consumeAndHalt: The lightList should be consulted, + and if it exists, treated as a final authoritative statement + of any lights that exist at or below this prim, halting + recursive discovery of lights. + - consumeAndContinue: The lightList should be consulted, + but recursive traversal over nameChildren should continue + in case additional lights are added by descendants. + - ignore: The lightList should be entirely ignored. This + provides a simple way to temporarily invalidate an existing + cache. This is the fallback behavior. + """ + allowedTokens = ["consumeAndHalt", "consumeAndContinue", "ignore"] + ) +} + +class "ShapingAPI" ( + inherits = + doc = """Controls for shaping a light's emission.""" + customData = { + string extraIncludes = """ +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" """ + } + +) { + float inputs:shaping:focus = 0 ( + displayGroup = "Shaping" + displayName = "Emission Focus" + doc = """A control to shape the spread of light. Higher focus + values pull light towards the center and narrow the spread. + Implemented as an off-axis cosine power exponent. + TODO: clarify semantics""" + customData = { + token apiName = "shaping:focus" + } + ) + color3f inputs:shaping:focusTint = (0, 0, 0) ( + displayGroup = "Shaping" + displayName = "Emission Focus Tint" + doc = """Off-axis color tint. This tints the emission in the + falloff region. The default tint is black. + TODO: clarify semantics""" + customData = { + token apiName = "shaping:focusTint" + } + ) + float inputs:shaping:cone:angle = 90 ( + displayGroup = "Shaping" + displayName = "Cone Angle" + doc = """Angular limit off the primary axis to restrict the + light spread.""" + customData = { + token apiName = "shaping:cone:angle" + } + ) + float inputs:shaping:cone:softness = 0 ( + displayGroup = "Shaping" + displayName = "Cone Softness" + doc = """Controls the cutoff softness for cone angle. + TODO: clarify semantics""" + customData = { + token apiName = "shaping:cone:softness" + } + ) + asset inputs:shaping:ies:file ( + displayGroup = "Shaping" + displayName = "IES Profile" + doc = """An IES (Illumination Engineering Society) light + profile describing the angular distribution of light.""" + customData = { + token apiName = "shaping:ies:file" + } + ) + float inputs:shaping:ies:angleScale = 0 ( + displayGroup = "Shaping" + displayName = "Profile Scale" + doc = """Rescales the angular distribution of the IES profile. + TODO: clarify semantics""" + customData = { + token apiName = "shaping:ies:angleScale" + } + ) + bool inputs:shaping:ies:normalize = false ( + displayGroup = "Shaping" + displayName = "Profile Normalization" + doc = """Normalizes the IES profile so that it affects the shaping + of the light while preserving the overall energy output.""" + customData = { + token apiName = "shaping:ies:normalize" + } + ) +} + +class "ShadowAPI" ( + inherits = + doc = """Controls to refine a light's shadow behavior. 
These are + non-physical controls that are valuable for visual lighting work.""" + customData = { + string extraIncludes = """ +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" """ + } + +) { + bool inputs:shadow:enable = true ( + displayGroup = "Shadows" + displayName = "Enable Shadows" + doc = """Enables shadows to be cast by this light.""" + customData = { + token apiName = "shadow:enable" + } + ) + color3f inputs:shadow:color = (0, 0, 0) ( + displayGroup = "Shadows" + displayName = "Shadow Color" + doc = """The color of shadows cast by the light. This is a + non-physical control. The default is to cast black shadows.""" + customData = { + token apiName = "shadow:color" + } + ) + float inputs:shadow:distance = -1.0 ( + displayGroup = "Shadows" + displayName = "Shadow Max Distance" + doc = """The maximum distance shadows are cast. The distance is + measured as the distance between the point on the surface and the + occluder. + The default value (-1) indicates no limit. + """ + customData = { + token apiName = "shadow:distance" + } + ) + float inputs:shadow:falloff = -1.0 ( + displayGroup = "Shadows" + displayName = "Shadow Falloff" + doc = """The size of the shadow falloff zone within the shadow max + distance, which can be used to hide the hard cut-off for shadows seen + stretching past the max distance. The falloff zone is the area that + fades from full shadowing at the beginning of the falloff zone to no + shadowing at the max distance from the occluder. The falloff zone + distance cannot exceed the shadow max distance. A falloff value equal + to or less than zero (with -1 as the default) indicates no falloff. + """ + customData = { + token apiName = "shadow:falloff" + } + ) + float inputs:shadow:falloffGamma = 1.0 ( + displayGroup = "Shadows" + displayName = "Shadow Falloff Gamma" + doc = """A gamma (i.e., exponential) control over shadow strength + with linear distance within the falloff zone. This controls the rate + of the falloff. + This requires the use of shadowDistance and shadowFalloff.""" + customData = { + token apiName = "shadow:falloffGamma" + } + ) +} + +class LightFilter "LightFilter" ( + inherits = + doc = """A light filter modifies the effect of a light. + Lights refer to filters via relationships so that filters may be + shared. + + Linking + + Filters can be linked to geometry. Linking controls which geometry + a light-filter affects, when considering the light filters attached + to a light illuminating the geometry. + + Linking is specified as a collection (UsdCollectionAPI) which can + be accessed via GetFilterLinkCollection(). + """ + customData = { + dictionary extraPlugInfo = { + bool providesUsdShadeConnectableAPIBehavior = 1 + } + string extraIncludes = """ +#include "pxr/usd/usd/collectionAPI.h" +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" """ + } + prepend apiSchemas = ["CollectionAPI:filterLink"] +) { + uniform bool collection:filterLink:includeRoot = 1 ( + customData = { + bool apiSchemaOverride = true + } + ) + uniform token lightFilter:shaderId = "" ( + displayGroup = "Internal" + doc = """Default ID for the light filter's shader. + This defines the shader ID for this light filter when a render context + specific shader ID is not available. 
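Editor's note: a sketch of the non-physical shadow refinements above, assuming the pxr Python bindings; the distance and falloff values are arbitrary.

```
from pxr import Usd, UsdLux, Gf

stage = Usd.Stage.CreateInMemory()
fill = UsdLux.SphereLight.Define(stage, "/World/FillLight")

shadow = UsdLux.ShadowAPI.Apply(fill.GetPrim())
shadow.CreateShadowEnableAttr(True)
shadow.CreateShadowColorAttr(Gf.Vec3f(0.05, 0.05, 0.1))   # non-physical tinted shadows
shadow.CreateShadowDistanceAttr(500.0)                    # max shadowing distance
shadow.CreateShadowFalloffAttr(100.0)                     # fade zone inside that distance
shadow.CreateShadowFalloffGammaAttr(2.0)                  # rate of the fade
```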
+ + \\see GetShaderId + \\see GetShaderIdAttrForRenderContext + \\see SdrRegistry::GetShaderNodeByIdentifier + \\see SdrRegistry::GetShaderNodeByIdentifierAndType + """ + customData = { + token apiName = "shaderId" + } + ) + +} + +class "BoundableLightBase" ( + inherits = + doc = """Base class for intrinsic lights that are boundable. + + The primary purpose of this class is to provide a direct API to the + functions provided by LightAPI for concrete derived light types. + """ + customData = { + string extraIncludes = """#include "pxr/usd/usdLux/lightAPI.h" """ + } + prepend apiSchemas = ["LightAPI"] +) { +} + +class "NonboundableLightBase" ( + inherits = + doc = """Base class for intrinsic lights that are not boundable. + + The primary purpose of this class is to provide a direct API to the + functions provided by LightAPI for concrete derived light types. + """ + customData = { + string extraIncludes = """#include "pxr/usd/usdLux/lightAPI.h" """ + } + prepend apiSchemas = ["LightAPI"] +) { +} + +class DistantLight "DistantLight" ( + inherits = + doc = """Light emitted from a distant source along the -Z axis. + Also known as a directional light.""" +) { + uniform token light:shaderId = "DistantLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + float inputs:angle = 0.53 ( + displayGroup = "Basic" + displayName = "Angle Extent" + doc = """Angular size of the light in degrees. + As an example, the Sun is approximately 0.53 degrees as seen from Earth. + Higher values broaden the light and therefore soften shadow edges. + """ + customData = { + token apiName = "angle" + } + ) + float inputs:intensity = 50000 ( + doc = """Scales the emission of the light linearly. + The DistantLight has a high default intensity to approximate the Sun.""" + customData = { + token apiName = "intensity" + bool apiSchemaOverride = true + } + ) +} + +class DiskLight "DiskLight" ( + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = 1 + } + } + inherits = + doc = """Light emitted from one side of a circular disk. + The disk is centered in the XY plane and emits light along the -Z axis.""" +) { + uniform token light:shaderId = "DiskLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + float inputs:radius = 0.5 ( + displayGroup = "Geometry" + displayName = "Radius" + doc = "Radius of the disk." + customData = { + token apiName = "radius" + } + ) +} + +class RectLight "RectLight" ( + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = 1 + } + } + inherits = + doc = """Light emitted from one side of a rectangle. + The rectangle is centered in the XY plane and emits light along the -Z axis. + The rectangle is 1 unit in length in the X and Y axis. In the default + position, a texture file's min coordinates should be at (+X, +Y) and + max coordinates at (-X, -Y).""" +) { + uniform token light:shaderId = "RectLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + float inputs:width = 1 ( + displayGroup = "Geometry" + displayName = "Width" + doc = "Width of the rectangle, in the local X axis." + customData = { + token apiName = "width" + } + + ) + float inputs:height = 1 ( + displayGroup = "Geometry" + displayName = "Height" + doc = "Height of the rectangle, in the local Y axis." 
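Editor's note: a sketch of defining the intrinsic lights above with their geometry attributes, assuming the pxr Python bindings; paths and sizes are hypothetical.

```
from pxr import Usd, UsdLux

stage = Usd.Stage.CreateInMemory()

sun = UsdLux.DistantLight.Define(stage, "/World/Sun")
sun.CreateAngleAttr(0.53)         # angular size in degrees, roughly the Sun as seen from Earth
sun.CreateIntensityAttr(50000)    # matches the schema's high default

panel = UsdLux.RectLight.Define(stage, "/World/SoftBox")
panel.CreateWidthAttr(2.0)        # local X axis
panel.CreateHeightAttr(1.0)       # local Y axis
```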
+ customData = { + token apiName = "height" + } + ) + asset inputs:texture:file ( + displayGroup = "Basic" + displayName = "Color Map" + doc = """A color texture to use on the rectangle.""" + customData = { + token apiName = "textureFile" + } + ) +} + +class SphereLight "SphereLight" ( + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = 1 + } + } + inherits = + doc = """Light emitted outward from a sphere.""" +) { + uniform token light:shaderId = "SphereLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + float inputs:radius = 0.5 ( + displayGroup = "Geometry" + displayName = "Radius" + doc = "Radius of the sphere." + customData = { + token apiName = "radius" + } + ) + bool treatAsPoint = false ( + displayGroup = "Advanced" + displayName = "Treat As Point" + doc = """A hint that this light can be treated as a 'point' + light (effectively, a zero-radius sphere) by renderers that + benefit from non-area lighting. Renderers that only support + area lights can disregard this.""" + ) +} + +class CylinderLight "CylinderLight" ( + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = 1 + } + } + inherits = + doc = """Light emitted outward from a cylinder. + The cylinder is centered at the origin and has its major axis on the X axis. + The cylinder does not emit light from the flat end-caps. + """ +) { + uniform token light:shaderId = "CylinderLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + float inputs:length = 1 ( + displayGroup = "Geometry" + displayName = "Length" + doc = "Length of the cylinder, in the local X axis." + customData = { + token apiName = "length" + } + ) + float inputs:radius = 0.5 ( + displayGroup = "Geometry" + displayName = "Radius" + doc = "Radius of the cylinder." + customData = { + token apiName = "radius" + } + ) + bool treatAsLine = false ( + displayGroup = "Advanced" + displayName = "Treat As Line" + doc = """A hint that this light can be treated as a 'line' + light (effectively, a zero-radius cylinder) by renderers that + benefit from non-area lighting. Renderers that only support + area lights can disregard this.""" + ) +} + +class GeometryLight "GeometryLight" ( + inherits = + doc = """\\deprecated + Light emitted outward from a geometric prim (UsdGeomGprim), + which is typically a mesh.""" +) { + rel geometry ( + doc = """Relationship to the geometry to use as the light source.""" + ) + uniform token light:shaderId = "GeometryLight" ( + customData = { + bool apiSchemaOverride = true + } + ) +} + +class DomeLight "DomeLight" ( + inherits = + doc = """Light emitted inward from a distant external environment, + such as a sky or IBL light probe. The orientation of a dome light with a + latlong texture is expected to match the OpenEXR specification for latlong + environment maps. From the OpenEXR documentation: + + ------------------------------------------------------------------------- + Latitude-Longitude Map: + + The environment is projected onto the image using polar coordinates + (latitude and longitude). A pixel's x coordinate corresponds to + its longitude, and the y coordinate corresponds to its latitude. + Pixel (dataWindow.min.x, dataWindow.min.y) has latitude +pi/2 and + longitude +pi; pixel (dataWindow.max.x, dataWindow.max.y) has + latitude -pi/2 and longitude -pi. + + In 3D space, latitudes -pi/2 and +pi/2 correspond to the negative and + positive y direction. 
Latitude 0, longitude 0 points into positive + z direction; and latitude 0, longitude pi/2 points into positive x + direction. + + The size of the data window should be 2*N by N pixels (width by height), + where N can be any integer greater than 0. + ------------------------------------------------------------------------- +""" +) { + uniform token light:shaderId = "DomeLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + asset inputs:texture:file ( + displayGroup = "Basic" + displayName = "Color Map" + doc = """A color texture to use on the dome, such as an HDR (high + dynamic range) texture intended for IBL (image based lighting).""" + customData = { + token apiName = "textureFile" + } + ) + token inputs:texture:format = "automatic" ( + displayGroup = "Basic" + displayName = "Color Map Format" + allowedTokens = ["automatic", "latlong", "mirroredBall", "angular", "cubeMapVerticalCross"] + doc = """Specifies the parameterization of the color map file. + Valid values are: + - automatic: Tries to determine the layout from the file itself. + For example, Renderman texture files embed an explicit + parameterization. + - latlong: Latitude as X, longitude as Y. + - mirroredBall: An image of the environment reflected in a + sphere, using an implicitly orthogonal projection. + - angular: Similar to mirroredBall but the radial dimension + is mapped linearly to the angle, providing better sampling + at the edges. + - cubeMapVerticalCross: A cube map with faces laid out as a + vertical cross. + """ + customData = { + token apiName = "textureFormat" + } + ) + rel portals ( + doc = """Optional portals to guide light sampling.""" + ) + float guideRadius = 1.0e5 ( + displayGroup = "Guides" + displayName = "Radius" + doc = """The radius of guide geometry to use to visualize the dome light. The default is 1 km for scenes whose metersPerUnit is the USD default of 0.01 (i.e., 1 world unit is 1 cm).""" + ) +} + +class PortalLight "PortalLight" ( + inherits = + doc = """A rectangular portal in the local XY plane that guides sampling + of a dome light. Transmits light in the -Z direction. + The rectangle is 1 unit in length.""" +) { + uniform token light:shaderId = "PortalLight" ( + customData = { + bool apiSchemaOverride = true + } + ) + float3[] extent = [(-0.5, -0.5, 0.0), (0.5, 0.5, 0.0)] ( + doc = """Boundary extent of the unit rectangle in the XY plane that + defines the portal.""" + customData = { + token apiName = "" + } + ) +} + +class PluginLight "PluginLight" ( + inherits = + doc = """Light that provides properties that allow it to identify an + external SdrShadingNode definition, through UsdShadeNodeDefAPI, that can be + provided to render delegates without the need to provide a schema + definition for the light's type. + + \\see \\ref usdLux_PluginSchemas +""" + prepend apiSchemas = ["NodeDefAPI", "LightAPI"] + customData = { + string extraIncludes = """ +#include "pxr/usd/usdShade/nodeDefAPI.h" """ + } +) { +} + +class PluginLightFilter "PluginLightFilter" ( + inherits = + doc = """Light filter that provides properties that allow it to identify an + external SdrShadingNode definition, through UsdShadeNodeDefAPI, that can be + provided to render delegates without the need to provide a schema + definition for the light filter's type. 
+ + \\see \\ref usdLux_PluginSchemas +""" + prepend apiSchemas = ["NodeDefAPI"] + customData = { + string extraIncludes = """ +#include "pxr/usd/usdShade/nodeDefAPI.h" """ + } +) { +} diff --git a/blender/lib/usd/usdMedia/resources/generatedSchema.usda b/blender/lib/usd/usdMedia/resources/generatedSchema.usda new file mode 100644 index 0000000..9faec1e --- /dev/null +++ b/blender/lib/usd/usdMedia/resources/generatedSchema.usda @@ -0,0 +1,232 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class SpatialAudio "SpatialAudio" ( + doc = """The SpatialAudio primitive defines basic properties for encoding + playback of an audio file or stream within a USD Stage. The SpatialAudio + schema derives from UsdGeomXformable since it can support full spatial + audio while also supporting non-spatial mono and stereo sounds. One or + more SpatialAudio prims can be placed anywhere in the namespace, though it + is advantageous to place truly spatial audio prims under/inside the models + from which the sound emanates, so that the audio prim need only be + transformed relative to the model, rather than copying its animation. + + Timecode Attributes and Time Scaling + \\a startTime and \\a endTime are \"timecode\" valued + attributes which gives them the special behavior that + \"layer offsets\" affecting the layer in + which one of these values is authored are applied to the attribute's value + itself during value resolution. This allows audio playback to be kept in + sync with time sampled animation as the animation is affected by + \"layer offsets\" in the composition. But this behavior + brings with it some interesting edge cases and caveats when it comes to + \"layer offsets\" that include scale. + + #### Layer Offsets do not affect Media Dilation + Although authored layer offsets may have a time scale which can scale the + duration between an authored \\a startTime and \\a endTime, we make no + attempt to infer any playback dilation of the actual audio media itself. + Given that \\a startTime and \\a endTime can be independently authored in + different layers with differing time scales, it is not possible, in general, + to define an \"original timeframe\" from which we can compute a dilation to + composed stage-time. Even if we could compute a composed dilation this way, + it would still be impossible to flatten a stage or layer stack into a single + layer and still retain the composed audio dilation using this schema. + + #### Inverting startTime and endTime + Although we do not expect it to be common, it is possible to apply a + negative time scale to USD layers, which mostly has the effect of reversing + animation in the affected composition. If a negative scale is applied to a + composition that contains authored \\a startTime and \\a endTime, it will + reverse their relative ordering in time. Therefore, we stipulate when + \\a playbackMode is \"onceFromStartToEnd\" or \"loopFromStartToEnd\", if + \\a endTime is less than \\a startTime, then begin playback at \\a endTime, + and continue until \\a startTime. When \\a startTime and \\a endTime are + inverted, we do not, however, stipulate that playback of the audio media + itself be inverted, since doing so \"successfully\" would require perfect + knowledge of when, within the audio clip, relevant audio ends (so that we + know how to offset the reversed audio to align it so that we reach the + \"beginning\" at \\a startTime), and sounds played in reverse are not likely + to produce desirable results. 
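Editor's note: the timecode behavior described above means a sublayer's layer offset is applied to the authored startTime/endTime during value resolution. A small arithmetic sketch of that mapping, and of how a negative scale inverts their ordering; this is illustrative only, not a USD API.

```
def apply_layer_offset(timecode, offset=0.0, scale=1.0):
    # An SdfLayerOffset maps an authored time t to t * scale + offset.
    return timecode * scale + offset

start, end = 101.0, 200.0
# A negative scale reverses the relative ordering, so playback runs from endTime to startTime.
print(apply_layer_offset(start, scale=-1.0))  # -101.0
print(apply_layer_offset(end, scale=-1.0))    # -200.0, now earlier than the mapped startTime
```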
+ """ +) +{ + uniform token auralMode = "spatial" ( + allowedTokens = ["spatial", "nonSpatial"] + doc = """Determines how audio should be played. + Valid values are: + - spatial: Play the audio in 3D space if the device can support spatial + audio. if not, fall back to mono. + - nonSpatial: Play the audio without regard to the SpatialAudio prim's + position. If the audio media contains any form of stereo or other + multi-channel sound, it is left to the application to determine + whether the listener's position should be taken into account. We + expect nonSpatial to be the choice for ambient sounds and music + sound-tracks. + """ + ) + uniform timecode endTime = 0 ( + doc = """Expressed in the timeCodesPerSecond of the containing stage, + \\a endTime specifies when the audio stream will cease playing during + animation playback if the length of the referenced audio clip is + longer than desired. This only applies if \\a playbackMode is set to + onceFromStartToEnd or loopFromStartToEnd, otherwise the \\a endTimeCode + of the stage is used instead of \\a endTime. + If \\a endTime is less than \\a startTime, it is expected that the audio + will instead be played from \\a endTime to \\a startTime. + Note that \\a endTime is expressed as a timecode so that the stage can + properly apply layer offsets when resolving its value. + See for more details and caveats. + """ + ) + uniform asset filePath = @@ ( + doc = '''Path to the audio file. + In general, the formats allowed for audio files is no more constrained + by USD than is image-type. As with images, however, usdz has stricter + requirements based on DMA and format support in browsers and consumer + devices. The allowed audio filetypes for usdz are M4A, MP3, WAV + (in order of preference). + \\sa Usdz Specification + ''' + ) + double gain = 1 ( + doc = '''Multiplier on the incoming audio signal. A value of 0 "mutes" + the signal. Negative values will be clamped to 0. + ''' + ) + uniform double mediaOffset = 0 ( + doc = """Expressed in seconds, \\a mediaOffset specifies the offset from + the referenced audio file's beginning at which we should begin playback + when stage playback reaches the time that prim's audio should start. + If the prim's \\a playbackMode is a looping mode, \\a mediaOffset is + applied only to the first run-through of the audio clip; the second and + all other loops begin from the start of the audio clip. + """ + ) + uniform token playbackMode = "onceFromStart" ( + allowedTokens = ["onceFromStart", "onceFromStartToEnd", "loopFromStart", "loopFromStartToEnd", "loopFromStage"] + doc = """Along with \\a startTime and \\a endTime, determines when the + audio playback should start and stop during the stage's animation + playback and whether the audio should loop during its duration. + Valid values are: + - onceFromStart: Play the audio once, starting at \\a startTime, + continuing until the audio completes. + - onceFromStartToEnd: Play the audio once beginning at \\a startTime, + continuing until \\a endTime or until the audio completes, whichever + comes first. + - loopFromStart: Start playing the audio at \\a startTime and continue + looping through to the stage's authored \\a endTimeCode. + - loopFromStartToEnd: Start playing the audio at \\a startTime and + continue looping through, stopping the audio at \\a endTime. + - loopFromStage: Start playing the audio at the stage's authored + \\a startTimeCode and continue looping through to the stage's authored + \\a endTimeCode. 
This can be useful for ambient sounds that should always + be active. + """ + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + uniform timecode startTime = 0 ( + doc = """Expressed in the timeCodesPerSecond of the containing stage, + \\a startTime specifies when the audio stream will start playing during + animation playback. This value is ignored when \\a playbackMode is set + to loopFromStage as, in this mode, the audio will always start at the + stage's authored \\a startTimeCode. + Note that \\a startTime is expressed as a timecode so that the stage can + properly apply layer offsets when resolving its value. See + for more details and caveats. + """ + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "AssetPreviewsAPI" ( + doc = """AssetPreviewsAPI is the interface for authoring and accessing + precomputed, lightweight previews of assets. It is an applied schema, + which means that an arbitrary number of prims on a stage can have the schema + applied and therefore can contain previews; however, to access a stage's + \"default\" previews, one consults + the stage's `defaultPrim`. + + AssetPreviewsAPI supports the following kinds of previews: + - **thumbnails** : a set of pre-rendered images of the asset. 
There is no + prescribed size for thumbnail images, but care should be taken to ensure + their inclusion does not substantially increase the overall size of an + asset, as, for example, when packaged into USDZ. + + Although the UsdMediaAssetPreviewsAPI class can be used to interrogate any + prim, no query in the API will succeed unless the schema has been applied + to the prim. This schema deals only with asset paths, and clients wishing + to directly consume the returned data must do so by retrieving an ArAsset + from the session's ArAssetResolver. + + The schema defines no properties or metadata fallback values. Rather, + Asset Previews are encoded as part of a prim's `assetInfo` metadata. A + default thumbnail image would look like: + ``` +1. assetInfo = { +2. dictionary previews = { +3. dictionary thumbnails = { +4. dictionary default = { +5. asset defaultImage = @chair_thumb.jpg@ +6. } +7. } +8. } +9. } + ``` + + """ +) +{ +} + diff --git a/blender/lib/usd/usdMedia/resources/plugInfo.json b/blender/lib/usd/usdMedia/resources/plugInfo.json new file mode 100644 index 0000000..cf2fc02 --- /dev/null +++ b/blender/lib/usd/usdMedia/resources/plugInfo.json @@ -0,0 +1,38 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdMediaAssetPreviewsAPI": { + "alias": { + "UsdSchemaBase": "AssetPreviewsAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdMediaSpatialAudio": { + "alias": { + "UsdSchemaBase": "SpatialAudio" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomXformable" + ], + "schemaKind": "concreteTyped" + } + } + }, + "LibraryPath": "", + "Name": "usdMedia", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdMedia/resources/usdMedia/schema.usda b/blender/lib/usd/usdMedia/resources/usdMedia/schema.usda new file mode 100644 index 0000000..3f35526 --- /dev/null +++ b/blender/lib/usd/usdMedia/resources/usdMedia/schema.usda @@ -0,0 +1,230 @@ +#usda 1.0 +( + "This file describes the USD Media schemata for code generation." + subLayers = [ + @usdGeom/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdMedia" + string libraryPath = "pxr/usd/usdMedia" + } +) +{ +} + +class SpatialAudio "SpatialAudio" ( + inherits = + doc = """The SpatialAudio primitive defines basic properties for encoding + playback of an audio file or stream within a USD Stage. The SpatialAudio + schema derives from UsdGeomXformable since it can support full spatial + audio while also supporting non-spatial mono and stereo sounds. One or + more SpatialAudio prims can be placed anywhere in the namespace, though it + is advantageous to place truly spatial audio prims under/inside the models + from which the sound emanates, so that the audio prim need only be + transformed relative to the model, rather than copying its animation. + + \\section Usd_SpatialAudio_TimeScaling Timecode Attributes and Time Scaling + \\a startTime and \\a endTime are \\ref SdfTimeCode "timecode" valued + attributes which gives them the special behavior that + \\ref SdfLayerOffset "layer offsets" affecting the layer in + which one of these values is authored are applied to the attribute's value + itself during value resolution. 
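The assetInfo layout shown in the AssetPreviewsAPI doc above can be authored directly as prim metadata. Below is a minimal sketch using the pxr Python bindings, assuming the UsdMedia module is available; the asset root and thumbnail file name are hypothetical.

```
from pxr import Usd, Sdf, UsdMedia

stage = Usd.Stage.CreateInMemory()
prim = stage.DefinePrim("/Chair", "Xform")   # hypothetical asset root
stage.SetDefaultPrim(prim)

# Apply the schema so AssetPreviewsAPI queries on this prim will succeed.
UsdMedia.AssetPreviewsAPI.Apply(prim)

# Previews are stored in assetInfo rather than in properties, mirroring the
# nested dictionary layout shown in the schema doc above.
prim.SetAssetInfoByKey("previews", {
    "thumbnails": {
        "default": {
            "defaultImage": Sdf.AssetPath("chair_thumb.jpg"),
        }
    }
})
```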
This allows audio playback to be kept in + sync with time sampled animation as the animation is affected by + \\ref SdfLayerOffset "layer offsets" in the composition. But this behavior + brings with it some interesting edge cases and caveats when it comes to + \\ref SdfLayerOffset "layer offsets" that include scale. + + #### Layer Offsets do not affect Media Dilation + Although authored layer offsets may have a time scale which can scale the + duration between an authored \\a startTime and \\a endTime, we make no + attempt to infer any playback dilation of the actual audio media itself. + Given that \\a startTime and \\a endTime can be independently authored in + different layers with differing time scales, it is not possible, in general, + to define an "original timeframe" from which we can compute a dilation to + composed stage-time. Even if we could compute a composed dilation this way, + it would still be impossible to flatten a stage or layer stack into a single + layer and still retain the composed audio dilation using this schema. + + #### Inverting startTime and endTime + Although we do not expect it to be common, it is possible to apply a + negative time scale to USD layers, which mostly has the effect of reversing + animation in the affected composition. If a negative scale is applied to a + composition that contains authored \\a startTime and \\a endTime, it will + reverse their relative ordering in time. Therefore, we stipulate when + \\a playbackMode is "onceFromStartToEnd" or "loopFromStartToEnd", if + \\a endTime is less than \\a startTime, then begin playback at \\a endTime, + and continue until \\a startTime. When \\a startTime and \\a endTime are + inverted, we do not, however, stipulate that playback of the audio media + itself be inverted, since doing so "successfully" would require perfect + knowledge of when, within the audio clip, relevant audio ends (so that we + know how to offset the reversed audio to align it so that we reach the + "beginning" at \\a startTime), and sounds played in reverse are not likely + to produce desirable results. + """ +) { + uniform asset filePath = @@ ( + doc = """Path to the audio file. + In general, the formats allowed for audio files is no more constrained + by USD than is image-type. As with images, however, usdz has stricter + requirements based on DMA and format support in browsers and consumer + devices. The allowed audio filetypes for usdz are M4A, MP3, WAV + (in order of preference). + \\sa Usdz Specification + """ + ) + uniform token auralMode = "spatial" ( + allowedTokens = ["spatial", "nonSpatial"] + doc = """Determines how audio should be played. + Valid values are: + - spatial: Play the audio in 3D space if the device can support spatial + audio. if not, fall back to mono. + - nonSpatial: Play the audio without regard to the SpatialAudio prim's + position. If the audio media contains any form of stereo or other + multi-channel sound, it is left to the application to determine + whether the listener's position should be taken into account. We + expect nonSpatial to be the choice for ambient sounds and music + sound-tracks. + """ + ) + uniform token playbackMode = "onceFromStart" ( + allowedTokens = ["onceFromStart", "onceFromStartToEnd", "loopFromStart", + "loopFromStartToEnd", "loopFromStage"] + doc = """Along with \\a startTime and \\a endTime, determines when the + audio playback should start and stop during the stage's animation + playback and whether the audio should loop during its duration. 
+ Valid values are: + - onceFromStart: Play the audio once, starting at \\a startTime, + continuing until the audio completes. + - onceFromStartToEnd: Play the audio once beginning at \\a startTime, + continuing until \\a endTime or until the audio completes, whichever + comes first. + - loopFromStart: Start playing the audio at \\a startTime and continue + looping through to the stage's authored \\a endTimeCode. + - loopFromStartToEnd: Start playing the audio at \\a startTime and + continue looping through, stopping the audio at \\a endTime. + - loopFromStage: Start playing the audio at the stage's authored + \\a startTimeCode and continue looping through to the stage's authored + \\a endTimeCode. This can be useful for ambient sounds that should always + be active. + """ + ) + uniform timecode startTime = 0 ( + doc = """Expressed in the timeCodesPerSecond of the containing stage, + \\a startTime specifies when the audio stream will start playing during + animation playback. This value is ignored when \\a playbackMode is set + to loopFromStage as, in this mode, the audio will always start at the + stage's authored \\a startTimeCode. + Note that \\a startTime is expressed as a timecode so that the stage can + properly apply layer offsets when resolving its value. See + \\ref Usd_SpatialAudio_TimeScaling for more details and caveats. + """ + ) + uniform timecode endTime = 0 ( + doc = """Expressed in the timeCodesPerSecond of the containing stage, + \\a endTime specifies when the audio stream will cease playing during + animation playback if the length of the referenced audio clip is + longer than desired. This only applies if \\a playbackMode is set to + onceFromStartToEnd or loopFromStartToEnd, otherwise the \\a endTimeCode + of the stage is used instead of \\a endTime. + If \\a endTime is less than \\a startTime, it is expected that the audio + will instead be played from \\a endTime to \\a startTime. + Note that \\a endTime is expressed as a timecode so that the stage can + properly apply layer offsets when resolving its value. + See \\ref Usd_SpatialAudio_TimeScaling for more details and caveats. + """ + ) + uniform double mediaOffset = 0 ( + doc = """Expressed in seconds, \\a mediaOffset specifies the offset from + the referenced audio file's beginning at which we should begin playback + when stage playback reaches the time that prim's audio should start. + If the prim's \\a playbackMode is a looping mode, \\a mediaOffset is + applied only to the first run-through of the audio clip; the second and + all other loops begin from the start of the audio clip. + """ + ) + double gain = 1.0 ( + doc = """Multiplier on the incoming audio signal. A value of 0 "mutes" + the signal. Negative values will be clamped to 0. + """ + ) +} + +class "AssetPreviewsAPI" ( + inherits = + doc = """AssetPreviewsAPI is the interface for authoring and accessing + precomputed, lightweight previews of assets. It is an applied schema, + which means that an arbitrary number of prims on a stage can have the schema + applied and therefore can contain previews; however, to access a stage's + "default" previews, one consults + the stage's `defaultPrim`. + + AssetPreviewsAPI supports the following kinds of previews: + - **thumbnails** : a set of pre-rendered images of the asset. There is no + prescribed size for thumbnail images, but care should be taken to ensure + their inclusion does not substantially increase the overall size of an + asset, as, for example, when packaged into USDZ. 
+ + Although the UsdMediaAssetPreviewsAPI class can be used to interrogate any + prim, no query in the API will succeed unless the schema has been applied + to the prim. This schema deals only with asset paths, and clients wishing + to directly consume the returned data must do so by retrieving an ArAsset + from the session's ArAssetResolver. + + The schema defines no properties or metadata fallback values. Rather, + Asset Previews are encoded as part of a prim's `assetInfo` metadata. A + default thumbnail image would look like: + ``` +1. assetInfo = { +2. dictionary previews = { +3. dictionary thumbnails = { +4. dictionary default = { +5. asset defaultImage = @chair_thumb.jpg@ +6. } +7. } +8. } +9. } + ``` + + """ + customData = { + token apiSchemaType = "singleApply" + string extraIncludes = """ +#include "pxr/usd/sdf/types.h" + """ + dictionary schemaTokens = { + dictionary previews = { + string doc = """Dictionary key in the assetInfo dictionary + for asset previews sub-dictionary. + """ + } + dictionary thumbnails = { + string doc = """Dictionary key in the assetInfo["previews"] + dictionary for thumbnails previews sub-dictionary. + """ + } + dictionary defaultImage = { + string doc = """Dictionary key in a Thumbnails dictionary for + the default thumbnail image. + """ + } + dictionary previewThumbnails = { + string value = "previews:thumbnails" + string doc = """Full key in the assetInfo dictionary for + thumbnails previews dictionary. + """ + } + dictionary previewThumbnailsDefault = { + string value = "previews:thumbnails:default" + string doc = """Full key in the assetInfo dictionary for + the "default" thumbnails in the previews dictionary. + """ + } + } + } +) +{ + # no properties! +} diff --git a/blender/lib/usd/usdMtlx/resources/plugInfo.json b/blender/lib/usd/usdMtlx/resources/plugInfo.json new file mode 100644 index 0000000..4bee45b --- /dev/null +++ b/blender/lib/usd/usdMtlx/resources/plugInfo.json @@ -0,0 +1,37 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdMtlxFileFormat": { + "bases": [ + "SdfFileFormat" + ], + "displayName": "USD MaterialX File Format", + "extensions": [ + "mtlx" + ], + "formatId": "mtlx", + "primary": true, + "supportsEditing": false, + "supportsWriting": false, + "target": "usd" + }, + "UsdMtlxDiscoveryPlugin" : { + "bases": ["NdrDiscoveryPlugin"], + "displayName": "MaterialX Discovery" + }, + "UsdMtlxParserPlugin" : { + "bases": ["NdrParserPlugin"], + "displayName": "MaterialX Node Parser" + } + } + }, + "LibraryPath": "", + "Name": "usdMtlx", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdPhysics/resources/generatedSchema.usda b/blender/lib/usd/usdPhysics/resources/generatedSchema.usda new file mode 100644 index 0000000..5d867cc --- /dev/null +++ b/blender/lib/usd/usdPhysics/resources/generatedSchema.usda @@ -0,0 +1,953 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class PhysicsScene "PhysicsScene" ( + doc = "General physics simulation properties, required for simulation." +) +{ + vector3f physics:gravityDirection = (0, 0, 0) ( + displayName = "Gravity Direction" + doc = """Gravity direction vector in simulation world space. Will be + normalized before use. A zero vector is a request to use the negative + upAxis. Unitless.""" + ) + float physics:gravityMagnitude = -inf ( + displayName = "Gravity Magnitude" + doc = """Gravity acceleration magnitude in simulation world space. 
+ A negative value is a request to use a value equivalent to earth + gravity regardless of the metersPerUnit scaling used by this scene. + Units: distance/second/second.""" + ) +} + +class "PhysicsRigidBodyAPI" ( + doc = """Applies physics body attributes to any UsdGeomXformable prim and + marks that prim to be driven by a simulation. If a simulation is running + it will update this prim's pose. All prims in the hierarchy below this + prim should move accordingly.""" +) +{ + vector3f physics:angularVelocity = (0, 0, 0) ( + displayName = "Angular Velocity" + doc = """Angular velocity in the same space as the node's xform. + Units: degrees/second.""" + ) + bool physics:kinematicEnabled = 0 ( + displayName = "Kinematic Enabled" + doc = """Determines whether the body is kinematic or not. A kinematic + body is a body that is moved through animated poses or through + user defined poses. The simulation derives velocities for the + kinematic body based on the external motion. When a continuous motion + is not desired, this kinematic flag should be set to false.""" + ) + bool physics:rigidBodyEnabled = 1 ( + displayName = "Rigid Body Enabled" + doc = "Determines if this PhysicsRigidBodyAPI is enabled." + ) + rel physics:simulationOwner ( + displayName = "Simulation Owner" + doc = """Single PhysicsScene that will simulate this body. By + default this is the first PhysicsScene found in the stage using + UsdStage::Traverse().""" + ) + uniform bool physics:startsAsleep = 0 ( + displayName = "Starts as Asleep" + doc = "Determines if the body is asleep when the simulation starts." + ) + vector3f physics:velocity = (0, 0, 0) ( + displayName = "Linear Velocity" + doc = """Linear velocity in the same space as the node's xform. + Units: distance/second.""" + ) +} + +class "PhysicsMassAPI" ( + doc = """Defines explicit mass properties (mass, density, inertia etc.). + MassAPI can be applied to any object that has a PhysicsCollisionAPI or + a PhysicsRigidBodyAPI.""" +) +{ + point3f physics:centerOfMass = (-inf, -inf, -inf) ( + displayName = "Center of Mass" + doc = "Center of mass in the prim's local space. Units: distance." + ) + float physics:density = 0 ( + displayName = "Density" + doc = """If non-zero, specifies the density of the object. + In the context of rigid body physics, density indirectly results in + setting mass via (mass = density x volume of the object). How the + volume is computed is up to implementation of the physics system. + It is generally computed from the collision approximation rather than + the graphical mesh. In the case where both density and mass are + specified for the same object, mass has precedence over density. + Unlike mass, child's prim's density overrides parent prim's density + as it is accumulative. Note that density of a collisionAPI can be also + alternatively set through a PhysicsMaterialAPI. The material density + has the weakest precedence in density definition. Note if density is + 0.0 it is ignored. Units: mass/distance/distance/distance.""" + ) + float3 physics:diagonalInertia = (0, 0, 0) ( + displayName = "Diagonal Inertia" + doc = """If non-zero, specifies diagonalized inertia tensor along the + principal axes. Note if diagonalInertial is (0.0, 0.0, 0.0) it is + ignored. Units: mass*distance*distance.""" + ) + float physics:mass = 0 ( + displayName = "Mass" + doc = """If non-zero, directly specifies the mass of the object. + Note that any child prim can also have a mass when they apply massAPI. 
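A minimal sketch of the scene, rigid-body and mass attributes documented above, using the pxr Python bindings (prim paths and values are hypothetical, not part of the generated schema file):

```
from pxr import Usd, UsdGeom, UsdPhysics, Gf

stage = Usd.Stage.CreateInMemory()

# A scene prim holding the global simulation settings documented above.
scene = UsdPhysics.Scene.Define(stage, "/World/physicsScene")
scene.CreateGravityDirectionAttr(Gf.Vec3f(0, 0, -1))
scene.CreateGravityMagnitudeAttr(9.81)

# Any Xformable can be turned into a simulated, colliding body.
cube = UsdGeom.Cube.Define(stage, "/World/Cube")
UsdPhysics.RigidBodyAPI.Apply(cube.GetPrim())
UsdPhysics.CollisionAPI.Apply(cube.GetPrim())

# Explicit mass wins over density when both are authored on the same object.
massAPI = UsdPhysics.MassAPI.Apply(cube.GetPrim())
massAPI.CreateMassAttr(10.0)
```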
+ In this case, the precedence rule is 'parent mass overrides the + child's'. This may come as counter-intuitive, but mass is a computed + quantity and in general not accumulative. For example, if a parent + has mass of 10, and one of two children has mass of 20, allowing + child's mass to override its parent results in a mass of -10 for the + other child. Note if mass is 0.0 it is ignored. Units: mass. + """ + ) + quatf physics:principalAxes = (0, 0, 0, 0) ( + displayName = "Principal Axes" + doc = """Orientation of the inertia tensor's principal axes in the + prim's local space.""" + ) +} + +class "PhysicsCollisionAPI" ( + doc = """Applies collision attributes to a UsdGeomXformable prim. If a + simulation is running, this geometry will collide with other geometries that + have PhysicsCollisionAPI applied. If a prim in the parent hierarchy has the + RigidBodyAPI applied, this collider is a part of that body. If there is + no body in the parent hierarchy, this collider is considered to be static.""" +) +{ + bool physics:collisionEnabled = 1 ( + displayName = "Collision Enabled" + doc = "Determines if the PhysicsCollisionAPI is enabled." + ) + rel physics:simulationOwner ( + displayName = "Simulation Owner" + doc = """Single PhysicsScene that will simulate this collider. + By default this object belongs to the first PhysicsScene. + Note that if a RigidBodyAPI in the hierarchy above has a different + simulationOwner then it has a precedence over this relationship.""" + ) +} + +class "PhysicsMeshCollisionAPI" ( + doc = """Attributes to control how a Mesh is made into a collider. + Can be applied to only a USDGeomMesh in addition to its + PhysicsCollisionAPI.""" +) +{ + uniform token physics:approximation = "none" ( + allowedTokens = ["none", "convexDecomposition", "convexHull", "boundingSphere", "boundingCube", "meshSimplification"] + displayName = "Approximation" + doc = """Determines the mesh's collision approximation: +\t\"none\" - The mesh geometry is used directly as a collider without any + approximation. +\t\"convexDecomposition\" - A convex mesh decomposition is performed. This + results in a set of convex mesh colliders. +\t\"convexHull\" - A convex hull of the mesh is generated and used as the + collider. +\t\"boundingSphere\" - A bounding sphere is computed around the mesh and used + as a collider. +\t\"boundingCube\" - An optimally fitting box collider is computed around the + mesh. +\t\"meshSimplification\" - A mesh simplification step is performed, resulting + in a simplified triangle mesh collider.""" + ) +} + +class "PhysicsMaterialAPI" ( + doc = """ Adds simulation material properties to a Material. All collisions + that have a relationship to this material will have their collision response + defined through this material.""" +) +{ + float physics:density = 0 ( + displayName = "Density" + doc = """If non-zero, defines the density of the material. This can be + used for body mass computation, see PhysicsMassAPI. + Note that if the density is 0.0 it is ignored. + Units: mass/distance/distance/distance.""" + ) + float physics:dynamicFriction = 0 ( + displayName = "Dynamic Friction" + doc = "Dynamic friction coefficient. Unitless." + ) + float physics:restitution = 0 ( + displayName = "Restitution" + doc = "Restitution coefficient. Unitless." + ) + float physics:staticFriction = 0 ( + displayName = "Static Friction" + doc = "Static friction coefficient. Unitless." 
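A minimal sketch of pairing a mesh collider approximation with a simulation material, using the pxr Python bindings (paths and coefficient values are hypothetical):

```
from pxr import Usd, UsdGeom, UsdShade, UsdPhysics

stage = Usd.Stage.CreateInMemory()

# Mesh collider with a convex-hull approximation, per the allowed tokens above.
rock = UsdGeom.Mesh.Define(stage, "/World/Rock")
UsdPhysics.CollisionAPI.Apply(rock.GetPrim())
meshCollision = UsdPhysics.MeshCollisionAPI.Apply(rock.GetPrim())
meshCollision.CreateApproximationAttr(UsdPhysics.Tokens.convexHull)

# Simulation material carrying friction/restitution for colliders bound to it.
rubber = UsdShade.Material.Define(stage, "/World/Looks/Rubber")
physMat = UsdPhysics.MaterialAPI.Apply(rubber.GetPrim())
physMat.CreateStaticFrictionAttr(0.9)
physMat.CreateDynamicFrictionAttr(0.7)
physMat.CreateRestitutionAttr(0.3)
```

Associating the material with the collider then goes through the usual UsdShade material binding, typically with a "physics" material purpose.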
+ ) +} + +class PhysicsCollisionGroup "PhysicsCollisionGroup" ( + apiSchemas = ["CollectionAPI:colliders"] + doc = """Defines a collision group for coarse filtering. When a collision + occurs between two objects that have a PhysicsCollisionGroup assigned, + they will collide with each other unless this PhysicsCollisionGroup pair + is filtered. See filteredGroups attribute. + + A CollectionAPI:colliders maintains a list of PhysicsCollisionAPI rel-s that + defines the members of this Collisiongroup. + """ +) +{ + rel physics:filteredGroups ( + displayName = "Filtered Groups" + doc = """References a list of PhysicsCollisionGroups with which + collisions should be ignored.""" + ) + bool physics:invertFilteredGroups ( + displayName = "Invert Filtered Groups" + doc = """Normally, the filter will disable collisions against the selected + filter groups. However, if this option is set, the filter will disable + collisions against all colliders except for those in the selected filter + groups.""" + ) + string physics:mergeGroup ( + displayName = "Merge With Groups" + doc = """If non-empty, any collision groups in a stage with a matching + mergeGroup should be considered to refer to the same collection. Matching + collision groups should behave as if there were a single group containing + referenced colliders and filter groups from both collections.""" + ) +} + +class "PhysicsFilteredPairsAPI" ( + doc = '''API to describe fine-grained filtering. If a collision between + two objects occurs, this pair might be filtered if the pair is defined + through this API. This API can be applied either to a body or collision + or even articulation. The "filteredPairs" defines what objects it should + not collide against. Note that FilteredPairsAPI filtering has precedence + over CollisionGroup filtering.''' +) +{ + rel physics:filteredPairs ( + displayName = "Filtered Pairs" + doc = "Relationship to objects that should be filtered." + ) +} + +class PhysicsJoint "PhysicsJoint" ( + doc = """A joint constrains the movement of rigid bodies. Joint can be + created between two rigid bodies or between one rigid body and world. + By default joint primitive defines a D6 joint where all degrees of + freedom are free. Three linear and three angular degrees of freedom. + Note that default behavior is to disable collision between jointed bodies. + """ +) +{ + rel physics:body0 ( + displayName = "Body 0" + doc = "Relationship to any UsdGeomXformable." + ) + rel physics:body1 ( + displayName = "Body 1" + doc = "Relationship to any UsdGeomXformable." + ) + float physics:breakForce = inf ( + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + float physics:breakTorque = inf ( + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) + Units: mass * distance * distance / second / second""" + ) + bool physics:collisionEnabled = 0 ( + displayName = "Collision Enabled" + doc = "Determines if the jointed subtrees should collide or not." + ) + uniform bool physics:excludeFromArticulation = 0 ( + displayName = "Exclude From Articulation" + doc = "Determines if the joint can be included in an Articulation." + ) + bool physics:jointEnabled = 1 ( + displayName = "Joint Enabled" + doc = "Determines if the joint is enabled." 
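A minimal sketch of coarse (group) and fine-grained (pair) collision filtering with the pxr Python bindings; prim paths are hypothetical:

```
from pxr import Usd, UsdPhysics

stage = Usd.Stage.CreateInMemory()
debris = UsdPhysics.CollisionGroup.Define(stage, "/World/debrisGroup")
props = UsdPhysics.CollisionGroup.Define(stage, "/World/propsGroup")

# Membership is the "colliders" CollectionAPI carried by each group prim.
debris.GetCollidersCollectionAPI().CreateIncludesRel().AddTarget("/World/Debris")
props.GetCollidersCollectionAPI().CreateIncludesRel().AddTarget("/World/Props")

# Coarse filtering: members of debrisGroup no longer collide with propsGroup.
debris.CreateFilteredGroupsRel().AddTarget(props.GetPath())

# Fine-grained filtering between two specific prims; this has precedence over
# CollisionGroup filtering.
pairs = UsdPhysics.FilteredPairsAPI.Apply(stage.DefinePrim("/World/Debris/Chunk01"))
pairs.CreateFilteredPairsRel().AddTarget("/World/Props/Table")
```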
+ ) + point3f physics:localPos0 = (0, 0, 0) ( + displayName = "Local Position 0" + doc = "Relative position of the joint frame to body0's frame." + ) + point3f physics:localPos1 = (0, 0, 0) ( + displayName = "Local Position 1" + doc = "Relative position of the joint frame to body1's frame." + ) + quatf physics:localRot0 = (1, 0, 0, 0) ( + displayName = "Local Rotation 0" + doc = "Relative orientation of the joint frame to body0's frame." + ) + quatf physics:localRot1 = (1, 0, 0, 0) ( + displayName = "Local Rotation 1" + doc = "Relative orientation of the joint frame to body1's frame." + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class PhysicsRevoluteJoint "PhysicsRevoluteJoint" ( + doc = """Predefined revolute joint type (rotation along revolute joint + axis is permitted.)""" +) +{ + uniform token physics:axis = "X" ( + allowedTokens = ["X", "Y", "Z"] + displayName = "Axis" + doc = "Joint axis." + ) + rel physics:body0 ( + displayName = "Body 0" + doc = "Relationship to any UsdGeomXformable." + ) + rel physics:body1 ( + displayName = "Body 1" + doc = "Relationship to any UsdGeomXformable." + ) + float physics:breakForce = inf ( + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + float physics:breakTorque = inf ( + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) + Units: mass * distance * distance / second / second""" + ) + bool physics:collisionEnabled = 0 ( + displayName = "Collision Enabled" + doc = "Determines if the jointed subtrees should collide or not." 
+ ) + uniform bool physics:excludeFromArticulation = 0 ( + displayName = "Exclude From Articulation" + doc = "Determines if the joint can be included in an Articulation." + ) + bool physics:jointEnabled = 1 ( + displayName = "Joint Enabled" + doc = "Determines if the joint is enabled." + ) + point3f physics:localPos0 = (0, 0, 0) ( + displayName = "Local Position 0" + doc = "Relative position of the joint frame to body0's frame." + ) + point3f physics:localPos1 = (0, 0, 0) ( + displayName = "Local Position 1" + doc = "Relative position of the joint frame to body1's frame." + ) + quatf physics:localRot0 = (1, 0, 0, 0) ( + displayName = "Local Rotation 0" + doc = "Relative orientation of the joint frame to body0's frame." + ) + quatf physics:localRot1 = (1, 0, 0, 0) ( + displayName = "Local Rotation 1" + doc = "Relative orientation of the joint frame to body1's frame." + ) + float physics:lowerLimit = -inf ( + displayName = "Lower Limit" + doc = """Lower limit. Units: degrees. -inf means not limited in + negative direction.""" + ) + float physics:upperLimit = inf ( + displayName = "Upper Limit" + doc = """Upper limit. Units: degrees. inf means not limited in + positive direction.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class PhysicsPrismaticJoint "PhysicsPrismaticJoint" ( + doc = """Predefined prismatic joint type (translation along prismatic + joint axis is permitted.)""" +) +{ + uniform token physics:axis = "X" ( + allowedTokens = ["X", "Y", "Z"] + displayName = "Axis" + doc = "Joint axis." + ) + rel physics:body0 ( + displayName = "Body 0" + doc = "Relationship to any UsdGeomXformable." + ) + rel physics:body1 ( + displayName = "Body 1" + doc = "Relationship to any UsdGeomXformable." 
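A minimal sketch of a limited revolute joint between two bodies, using the pxr Python bindings (paths and limit values are hypothetical):

```
from pxr import Usd, UsdGeom, UsdPhysics

stage = Usd.Stage.CreateInMemory()
base = UsdGeom.Xform.Define(stage, "/World/Base")
arm = UsdGeom.Xform.Define(stage, "/World/Arm")
UsdPhysics.RigidBodyAPI.Apply(arm.GetPrim())

# Hinge between the static base and the simulated arm, limited to +/- 45 degrees.
hinge = UsdPhysics.RevoluteJoint.Define(stage, "/World/Joints/Hinge")
hinge.CreateBody0Rel().AddTarget(base.GetPath())
hinge.CreateBody1Rel().AddTarget(arm.GetPath())
hinge.CreateAxisAttr("X")
hinge.CreateLowerLimitAttr(-45.0)
hinge.CreateUpperLimitAttr(45.0)
```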
+ ) + float physics:breakForce = inf ( + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + float physics:breakTorque = inf ( + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) + Units: mass * distance * distance / second / second""" + ) + bool physics:collisionEnabled = 0 ( + displayName = "Collision Enabled" + doc = "Determines if the jointed subtrees should collide or not." + ) + uniform bool physics:excludeFromArticulation = 0 ( + displayName = "Exclude From Articulation" + doc = "Determines if the joint can be included in an Articulation." + ) + bool physics:jointEnabled = 1 ( + displayName = "Joint Enabled" + doc = "Determines if the joint is enabled." + ) + point3f physics:localPos0 = (0, 0, 0) ( + displayName = "Local Position 0" + doc = "Relative position of the joint frame to body0's frame." + ) + point3f physics:localPos1 = (0, 0, 0) ( + displayName = "Local Position 1" + doc = "Relative position of the joint frame to body1's frame." + ) + quatf physics:localRot0 = (1, 0, 0, 0) ( + displayName = "Local Rotation 0" + doc = "Relative orientation of the joint frame to body0's frame." + ) + quatf physics:localRot1 = (1, 0, 0, 0) ( + displayName = "Local Rotation 1" + doc = "Relative orientation of the joint frame to body1's frame." + ) + float physics:lowerLimit = -inf ( + displayName = "Lower Limit" + doc = """Lower limit. Units: distance. -inf means not limited in + negative direction.""" + ) + float physics:upperLimit = inf ( + displayName = "Upper Limit" + doc = """Upper limit. Units: distance. inf means not limited in + positive direction.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class PhysicsSphericalJoint "PhysicsSphericalJoint" ( + doc = """Predefined spherical joint type (Removes linear degrees of + freedom, cone limit may restrict the motion in a given range.) It allows + two limit values, which when equal create a circular, else an elliptic + cone limit around the limit axis.""" +) +{ + uniform token physics:axis = "X" ( + allowedTokens = ["X", "Y", "Z"] + displayName = "Axis" + doc = "Cone limit axis." + ) + rel physics:body0 ( + displayName = "Body 0" + doc = "Relationship to any UsdGeomXformable." + ) + rel physics:body1 ( + displayName = "Body 1" + doc = "Relationship to any UsdGeomXformable." + ) + float physics:breakForce = inf ( + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + float physics:breakTorque = inf ( + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) + Units: mass * distance * distance / second / second""" + ) + bool physics:collisionEnabled = 0 ( + displayName = "Collision Enabled" + doc = "Determines if the jointed subtrees should collide or not." + ) + float physics:coneAngle0Limit = -1 ( + displayName = "Cone Angle0 Limit" + doc = """Cone limit from the primary joint axis in the local0 frame + toward the next axis. (Next axis of X is Y, and of Z is X.) A + negative value means not limited. Units: degrees.""" + ) + float physics:coneAngle1Limit = -1 ( + displayName = "Cone Angle1 Limit" + doc = """Cone limit from the primary joint axis in the local0 frame + toward the second to next axis. A negative value means not limited. + Units: degrees.""" + ) + uniform bool physics:excludeFromArticulation = 0 ( + displayName = "Exclude From Articulation" + doc = "Determines if the joint can be included in an Articulation." + ) + bool physics:jointEnabled = 1 ( + displayName = "Joint Enabled" + doc = "Determines if the joint is enabled." + ) + point3f physics:localPos0 = (0, 0, 0) ( + displayName = "Local Position 0" + doc = "Relative position of the joint frame to body0's frame." + ) + point3f physics:localPos1 = (0, 0, 0) ( + displayName = "Local Position 1" + doc = "Relative position of the joint frame to body1's frame." + ) + quatf physics:localRot0 = (1, 0, 0, 0) ( + displayName = "Local Rotation 0" + doc = "Relative orientation of the joint frame to body0's frame." + ) + quatf physics:localRot1 = (1, 0, 0, 0) ( + displayName = "Local Rotation 1" + doc = "Relative orientation of the joint frame to body1's frame." + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. 
+ + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class PhysicsDistanceJoint "PhysicsDistanceJoint" ( + doc = """Predefined distance joint type (Distance between rigid bodies + may be limited to given minimum or maximum distance.)""" +) +{ + rel physics:body0 ( + displayName = "Body 0" + doc = "Relationship to any UsdGeomXformable." + ) + rel physics:body1 ( + displayName = "Body 1" + doc = "Relationship to any UsdGeomXformable." + ) + float physics:breakForce = inf ( + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + float physics:breakTorque = inf ( + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) + Units: mass * distance * distance / second / second""" + ) + bool physics:collisionEnabled = 0 ( + displayName = "Collision Enabled" + doc = "Determines if the jointed subtrees should collide or not." + ) + uniform bool physics:excludeFromArticulation = 0 ( + displayName = "Exclude From Articulation" + doc = "Determines if the joint can be included in an Articulation." + ) + bool physics:jointEnabled = 1 ( + displayName = "Joint Enabled" + doc = "Determines if the joint is enabled." + ) + point3f physics:localPos0 = (0, 0, 0) ( + displayName = "Local Position 0" + doc = "Relative position of the joint frame to body0's frame." + ) + point3f physics:localPos1 = (0, 0, 0) ( + displayName = "Local Position 1" + doc = "Relative position of the joint frame to body1's frame." + ) + quatf physics:localRot0 = (1, 0, 0, 0) ( + displayName = "Local Rotation 0" + doc = "Relative orientation of the joint frame to body0's frame." + ) + quatf physics:localRot1 = (1, 0, 0, 0) ( + displayName = "Local Rotation 1" + doc = "Relative orientation of the joint frame to body1's frame." + ) + float physics:maxDistance = -1 ( + displayName = "Maximum Distance" + doc = """Maximum distance. If attribute is negative, the joint is not + limited. Units: distance.""" + ) + float physics:minDistance = -1 ( + displayName = "Minimum Distance" + doc = """Minimum distance. If attribute is negative, the joint is not + limited. 
Units: distance.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class PhysicsFixedJoint "PhysicsFixedJoint" ( + doc = """Predefined fixed joint type (All degrees of freedom are + removed.)""" +) +{ + rel physics:body0 ( + displayName = "Body 0" + doc = "Relationship to any UsdGeomXformable." + ) + rel physics:body1 ( + displayName = "Body 1" + doc = "Relationship to any UsdGeomXformable." + ) + float physics:breakForce = inf ( + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + float physics:breakTorque = inf ( + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) + Units: mass * distance * distance / second / second""" + ) + bool physics:collisionEnabled = 0 ( + displayName = "Collision Enabled" + doc = "Determines if the jointed subtrees should collide or not." + ) + uniform bool physics:excludeFromArticulation = 0 ( + displayName = "Exclude From Articulation" + doc = "Determines if the joint can be included in an Articulation." + ) + bool physics:jointEnabled = 1 ( + displayName = "Joint Enabled" + doc = "Determines if the joint is enabled." + ) + point3f physics:localPos0 = (0, 0, 0) ( + displayName = "Local Position 0" + doc = "Relative position of the joint frame to body0's frame." + ) + point3f physics:localPos1 = (0, 0, 0) ( + displayName = "Local Position 1" + doc = "Relative position of the joint frame to body1's frame." + ) + quatf physics:localRot0 = (1, 0, 0, 0) ( + displayName = "Local Rotation 0" + doc = "Relative orientation of the joint frame to body0's frame." 
+ ) + quatf physics:localRot1 = (1, 0, 0, 0) ( + displayName = "Local Rotation 1" + doc = "Relative orientation of the joint frame to body1's frame." + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) +} + +class "PhysicsLimitAPI" ( + doc = '''The PhysicsLimitAPI can be applied to a PhysicsJoint and will + restrict the movement along an axis. PhysicsLimitAPI is a multipleApply + schema: The PhysicsJoint can be restricted along "transX", "transY", + "transZ", "rotX", "rotY", "rotZ", "distance". Setting these as a + multipleApply schema TfToken name will define the degree of freedom the + PhysicsLimitAPI is applied to. Note that if the low limit is higher than + the high limit, motion along this axis is considered locked.''' +) +{ + float limit:__INSTANCE_NAME__:physics:high = inf ( + displayName = "High Limit" + doc = """Upper limit. Units: degrees or distance depending on trans or + rot axis applied to. inf means not limited in positive direction.""" + ) + float limit:__INSTANCE_NAME__:physics:low = -inf ( + displayName = "Low Limit" + doc = """Lower limit. Units: degrees or distance depending on trans or + rot axis applied to. -inf means not limited in negative direction.""" + ) +} + +class "PhysicsDriveAPI" ( + doc = '''The PhysicsDriveAPI when applied to any joint primitive will drive + the joint towards a given target. The PhysicsDriveAPI is a multipleApply + schema: drive can be set per axis "transX", "transY", "transZ", "rotX", + "rotY", "rotZ" or its "linear" for prismatic joint or "angular" for revolute + joints. Setting these as a multipleApply schema TfToken name will + define the degree of freedom the DriveAPI is applied to. 
Each drive is an + implicit force-limited damped spring: + Force or acceleration = stiffness * (targetPosition - position) + + damping * (targetVelocity - velocity)''' +) +{ + float drive:__INSTANCE_NAME__:physics:damping = 0 ( + doc = """Damping of the drive. Units: +\t\tif linear drive: mass/second +\t\tIf angular drive: mass*DIST_UNITS*DIST_UNITS/second/second/degrees.""" + ) + float drive:__INSTANCE_NAME__:physics:maxForce = inf ( + displayName = "Max Force" + doc = """Maximum force that can be applied to drive. Units: + if linear drive: mass*DIST_UNITS/second/second + if angular drive: mass*DIST_UNITS*DIST_UNITS/second/second + inf means not limited. Must be non-negative. + """ + ) + float drive:__INSTANCE_NAME__:physics:stiffness = 0 ( + doc = """Stiffness of the drive. Units: +\t\tif linear drive: mass/second/second +\t\tif angular drive: mass*DIST_UNITS*DIST_UNITS/degree/second/second.""" + ) + float drive:__INSTANCE_NAME__:physics:targetPosition = 0 ( + displayName = "Target Position" + doc = """Target value for position. Units: + if linear drive: distance + if angular drive: degrees.""" + ) + float drive:__INSTANCE_NAME__:physics:targetVelocity = 0 ( + displayName = "Target Velocity" + doc = """Target value for velocity. Units: + if linear drive: distance/second + if angular drive: degrees/second.""" + ) + uniform token drive:__INSTANCE_NAME__:physics:type = "force" ( + allowedTokens = ["force", "acceleration"] + displayName = "Type" + doc = """Drive spring is for the acceleration at the joint (rather + than the force).""" + ) +} + +class "PhysicsArticulationRootAPI" ( + doc = """PhysicsArticulationRootAPI can be applied to a scene graph node, + and marks the subtree rooted here for inclusion in one or more reduced + coordinate articulations. For floating articulations, this should be on + the root body. For fixed articulations (robotics jargon for e.g. a robot + arm for welding that is bolted to the floor), this API can be on a direct + or indirect parent of the root joint which is connected to the world, or + on the joint itself..""" +) +{ +} + diff --git a/blender/lib/usd/usdPhysics/resources/plugInfo.json b/blender/lib/usd/usdPhysics/resources/plugInfo.json new file mode 100644 index 0000000..3e7ab6e --- /dev/null +++ b/blender/lib/usd/usdPhysics/resources/plugInfo.json @@ -0,0 +1,198 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. 
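A minimal sketch of the multiple-apply Limit and Drive APIs together with an articulation root, using the pxr Python bindings (paths, instance names and gains are hypothetical):

```
from pxr import Usd, UsdPhysics

stage = Usd.Stage.CreateInMemory()
robot = stage.DefinePrim("/World/Robot", "Xform")
elbow = UsdPhysics.RevoluteJoint.Define(stage, "/World/Robot/Elbow")

# The multiple-apply instance name selects the degree of freedom ("rotX" here).
limit = UsdPhysics.LimitAPI.Apply(elbow.GetPrim(), "rotX")
limit.CreateLowAttr(-90.0)
limit.CreateHighAttr(90.0)

# Angular drive acting as the force-limited damped spring described above.
drive = UsdPhysics.DriveAPI.Apply(elbow.GetPrim(), "angular")
drive.CreateTargetPositionAttr(30.0)
drive.CreateStiffnessAttr(100.0)
drive.CreateDampingAttr(10.0)

# Mark the subtree rooted at /World/Robot as a reduced-coordinate articulation.
UsdPhysics.ArticulationRootAPI.Apply(robot)
```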
+{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "kilogramsPerUnit": { + "appliesTo": [ + "layers" + ], + "default": 1.0, + "displayGroup": "Stage", + "type": "double" + } + }, + "Types": { + "UsdPhysicsArticulationRootAPI": { + "alias": { + "UsdSchemaBase": "PhysicsArticulationRootAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsCollisionAPI": { + "alias": { + "UsdSchemaBase": "PhysicsCollisionAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsCollisionGroup": { + "alias": { + "UsdSchemaBase": "PhysicsCollisionGroup" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsDistanceJoint": { + "alias": { + "UsdSchemaBase": "PhysicsDistanceJoint" + }, + "autoGenerated": true, + "bases": [ + "UsdPhysicsJoint" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsDriveAPI": { + "alias": { + "UsdSchemaBase": "PhysicsDriveAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "multipleApplyAPI" + }, + "UsdPhysicsFilteredPairsAPI": { + "alias": { + "UsdSchemaBase": "PhysicsFilteredPairsAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsFixedJoint": { + "alias": { + "UsdSchemaBase": "PhysicsFixedJoint" + }, + "autoGenerated": true, + "bases": [ + "UsdPhysicsJoint" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsJoint": { + "alias": { + "UsdSchemaBase": "PhysicsJoint" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomImageable" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsLimitAPI": { + "alias": { + "UsdSchemaBase": "PhysicsLimitAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "multipleApplyAPI" + }, + "UsdPhysicsMassAPI": { + "alias": { + "UsdSchemaBase": "PhysicsMassAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsMaterialAPI": { + "alias": { + "UsdSchemaBase": "PhysicsMaterialAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsMeshCollisionAPI": { + "alias": { + "UsdSchemaBase": "PhysicsMeshCollisionAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsPrismaticJoint": { + "alias": { + "UsdSchemaBase": "PhysicsPrismaticJoint" + }, + "autoGenerated": true, + "bases": [ + "UsdPhysicsJoint" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsRevoluteJoint": { + "alias": { + "UsdSchemaBase": "PhysicsRevoluteJoint" + }, + "autoGenerated": true, + "bases": [ + "UsdPhysicsJoint" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsRigidBodyAPI": { + "alias": { + "UsdSchemaBase": "PhysicsRigidBodyAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdPhysicsScene": { + "alias": { + "UsdSchemaBase": "PhysicsScene" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdPhysicsSphericalJoint": { + "alias": { + "UsdSchemaBase": "PhysicsSphericalJoint" + }, + "autoGenerated": true, + "bases": [ + "UsdPhysicsJoint" + ], + "schemaKind": "concreteTyped" + } + } + }, + "LibraryPath": "", + "Name": "usdPhysics", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git 
a/blender/lib/usd/usdPhysics/resources/usdPhysics/schema.usda b/blender/lib/usd/usdPhysics/resources/usdPhysics/schema.usda new file mode 100644 index 0000000..27faa61 --- /dev/null +++ b/blender/lib/usd/usdPhysics/resources/usdPhysics/schema.usda @@ -0,0 +1,879 @@ +#usda 1.0 +( + subLayers = [ + @usdGeom/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdPhysics" + string libraryPath = "pxr/usd/usdPhysics" + bool useLiteralIdentifier = 0 + dictionary libraryTokens = { + dictionary colliders = { + string doc = """ + This token represents the collection name to use + with UsdCollectionAPI to represent colliders + of a CollisionGroup prim. + """ + } + dictionary kilogramsPerUnit = { + string doc = """Stage-level metadata that encodes a scene's + linear unit of measure as kilograms per encoded unit.""" + } + dictionary transX = { + string doc = """ + This token represents the translate around X axis degree of freedom + used in Joint Limits and Drives. + """ + } + dictionary transY = { + string doc = """ + This token represents the translate around Y axis degree of freedom + used in Joint Limits and Drives. + """ + } + dictionary transZ = { + string doc = """ + This token represents the translate around Z axis degree of freedom + used in Joint Limits and Drives. + """ + } + dictionary rotX = { + string doc = """ + This token represents the rotate around X axis degree of freedom + used in Joint Limits and Drives. + """ + } + dictionary rotY = { + string doc = """ + This token represents the rotate around Y axis degree of freedom + used in Joint Limits and Drives. + """ + } + dictionary rotZ = { + string doc = """ + This token represents the rotate around Z axis degree of freedom + used in Joint Limits and Drives. + """ + } + dictionary linear = { + string doc = """ + This token represents the linear degree of freedom + used in Prismatic Joint Drive. + """ + } + dictionary angular = { + string doc = """ + This token represents the angular degree of freedom + used in Revolute Joint Drive. + """ + } + dictionary distance = { + string doc = """ + This token represents the distance limit used for generic D6 joint. + """ + } + + } + } +) +{ + +} + +class PhysicsScene "PhysicsScene" +( + customData = { + string className = "Scene" + } + doc = """General physics simulation properties, required for simulation.""" + + inherits = +) +{ + vector3f physics:gravityDirection = (0.0, 0.0, 0.0) ( + customData = { + string apiName = "gravityDirection" + } + displayName = "Gravity Direction" + doc = """Gravity direction vector in simulation world space. Will be + normalized before use. A zero vector is a request to use the negative + upAxis. Unitless.""" + ) + + float physics:gravityMagnitude = -inf ( + customData = { + string apiName = "gravityMagnitude" + } + displayName = "Gravity Magnitude" + doc = """Gravity acceleration magnitude in simulation world space. + A negative value is a request to use a value equivalent to earth + gravity regardless of the metersPerUnit scaling used by this scene. + Units: distance/second/second.""" + ) +} + +class "PhysicsRigidBodyAPI" +( + customData = { + string className = "RigidBodyAPI" + string extraIncludes = """ +#include "pxr/base/gf/matrix3f.h" +#include "pxr/base/gf/quatf.h" """ + } + doc = """Applies physics body attributes to any UsdGeomXformable prim and + marks that prim to be driven by a simulation. If a simulation is running + it will update this prim's pose. 
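The kilogramsPerUnit layer metadata registered in the plugInfo above and the PhysicsScene gravity attributes could be authored from Python roughly as follows (a sketch with illustrative values, assuming the bundled pxr bindings):

from pxr import Usd, UsdGeom, UsdPhysics, Gf

stage = Usd.Stage.CreateInMemory()
UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.z)
UsdGeom.SetStageMetersPerUnit(stage, 1.0)
UsdPhysics.SetStageKilogramsPerUnit(stage, 1.0)   # mass unit, see kilogramsPerUnit metadata

scene = UsdPhysics.Scene.Define(stage, "/physicsScene")
scene.CreateGravityDirectionAttr(Gf.Vec3f(0.0, 0.0, -1.0))  # zero vector = use negative upAxis
scene.CreateGravityMagnitudeAttr(9.81)                      # negative (default) = earth gravity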
All prims in the hierarchy below this + prim should move accordingly.""" + + inherits = + +) + +{ + + bool physics:rigidBodyEnabled = true ( + customData = { + string apiName = "rigidBodyEnabled" + } + displayName = "Rigid Body Enabled" + doc = """Determines if this PhysicsRigidBodyAPI is enabled.""" + ) + + bool physics:kinematicEnabled = false ( + customData = { + string apiName = "kinematicEnabled" + } + displayName = "Kinematic Enabled" + doc = """Determines whether the body is kinematic or not. A kinematic + body is a body that is moved through animated poses or through + user defined poses. The simulation derives velocities for the + kinematic body based on the external motion. When a continuous motion + is not desired, this kinematic flag should be set to false.""" + ) + + rel physics:simulationOwner ( + customData = { + string apiName = "simulationOwner" + } + displayName = "Simulation Owner" + doc = """Single PhysicsScene that will simulate this body. By + default this is the first PhysicsScene found in the stage using + UsdStage::Traverse().""" + + ) + + uniform bool physics:startsAsleep = false ( + customData = { + string apiName = "startsAsleep" + } + displayName = "Starts as Asleep" + doc = "Determines if the body is asleep when the simulation starts." + ) + + vector3f physics:velocity= (0.0, 0.0, 0.0) ( + customData = { + string apiName = "velocity" + } + displayName = "Linear Velocity" + doc = """Linear velocity in the same space as the node's xform. + Units: distance/second.""" + ) + + vector3f physics:angularVelocity = (0.0, 0.0, 0.0) ( + customData = { + string apiName = "angularVelocity" + } + displayName = "Angular Velocity" + doc = """Angular velocity in the same space as the node's xform. + Units: degrees/second.""" + ) +} + +class "PhysicsMassAPI" +( + customData = { + string className = "MassAPI" + } + + doc = """Defines explicit mass properties (mass, density, inertia etc.). + MassAPI can be applied to any object that has a PhysicsCollisionAPI or + a PhysicsRigidBodyAPI.""" + + inherits = +) +{ + float physics:mass = 0.0 ( + customData = { + string apiName = "mass" + } + displayName = "Mass" + doc = """If non-zero, directly specifies the mass of the object. + Note that any child prim can also have a mass when they apply massAPI. + In this case, the precedence rule is 'parent mass overrides the + child's'. This may come as counter-intuitive, but mass is a computed + quantity and in general not accumulative. For example, if a parent + has mass of 10, and one of two children has mass of 20, allowing + child's mass to override its parent results in a mass of -10 for the + other child. Note if mass is 0.0 it is ignored. Units: mass. + """ + ) + + float physics:density = 0.0 ( + customData = { + string apiName = "density" + } + displayName = "Density" + doc = """If non-zero, specifies the density of the object. + In the context of rigid body physics, density indirectly results in + setting mass via (mass = density x volume of the object). How the + volume is computed is up to implementation of the physics system. + It is generally computed from the collision approximation rather than + the graphical mesh. In the case where both density and mass are + specified for the same object, mass has precedence over density. + Unlike mass, child's prim's density overrides parent prim's density + as it is accumulative. Note that density of a collisionAPI can be also + alternatively set through a PhysicsMaterialAPI. 
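A short sketch of marking a prim as a simulated body and giving it explicit mass, per the RigidBodyAPI and MassAPI attributes above (paths and values illustrative):

from pxr import Usd, UsdGeom, UsdPhysics, Gf

stage = Usd.Stage.CreateInMemory()
box = UsdGeom.Cube.Define(stage, "/World/box")

body = UsdPhysics.RigidBodyAPI.Apply(box.GetPrim())
body.CreateKinematicEnabledAttr(False)
body.CreateVelocityAttr(Gf.Vec3f(0.0, 0.0, 0.0))          # distance/second
body.CreateAngularVelocityAttr(Gf.Vec3f(0.0, 0.0, 90.0))  # degrees/second

mass = UsdPhysics.MassAPI.Apply(box.GetPrim())
mass.CreateMassAttr(2.0)   # 0.0 (default) would mean "ignored, derive from density/volume"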
The material density + has the weakest precedence in density definition. Note if density is + 0.0 it is ignored. Units: mass/distance/distance/distance.""" + ) + + point3f physics:centerOfMass = (-inf, -inf, -inf) ( + customData = { + string apiName = "centerOfMass" + } + displayName = "Center of Mass" + doc = """Center of mass in the prim's local space. Units: distance.""" + ) + + float3 physics:diagonalInertia = (0.0, 0.0, 0.0) ( + customData = { + string apiName = "diagonalInertia" + } + displayName = "Diagonal Inertia" + doc = """If non-zero, specifies diagonalized inertia tensor along the + principal axes. Note if diagonalInertial is (0.0, 0.0, 0.0) it is + ignored. Units: mass*distance*distance.""" + ) + + quatf physics:principalAxes = (0, 0, 0, 0) ( + customData = { + string apiName = "principalAxes" + } + displayName = "Principal Axes" + doc = """Orientation of the inertia tensor's principal axes in the + prim's local space.""" + ) +} + +class "PhysicsCollisionAPI" +( + customData = { + string className = "CollisionAPI" + } + doc = """Applies collision attributes to a UsdGeomXformable prim. If a + simulation is running, this geometry will collide with other geometries that + have PhysicsCollisionAPI applied. If a prim in the parent hierarchy has the + RigidBodyAPI applied, this collider is a part of that body. If there is + no body in the parent hierarchy, this collider is considered to be static.""" + + inherits = +) +{ + bool physics:collisionEnabled = true ( + customData = { + string apiName = "collisionEnabled" + } + displayName = "Collision Enabled" + doc = """Determines if the PhysicsCollisionAPI is enabled.""" + ) + + rel physics:simulationOwner ( + customData = { + string apiName = "simulationOwner" + } + displayName = "Simulation Owner" + doc = """Single PhysicsScene that will simulate this collider. + By default this object belongs to the first PhysicsScene. + Note that if a RigidBodyAPI in the hierarchy above has a different + simulationOwner then it has a precedence over this relationship.""" + ) +} + +class "PhysicsMeshCollisionAPI" +( + customData = { + string className = "MeshCollisionAPI" + } + doc = """Attributes to control how a Mesh is made into a collider. + Can be applied to only a USDGeomMesh in addition to its + PhysicsCollisionAPI.""" + + inherits = +) +{ + uniform token physics:approximation = "none" ( + customData = { + string apiName = "approximation" + } + allowedTokens = ["none","convexDecomposition","convexHull", + "boundingSphere","boundingCube","meshSimplification"] + + displayName = "Approximation" + doc = """Determines the mesh's collision approximation: + "none" - The mesh geometry is used directly as a collider without any + approximation. + "convexDecomposition" - A convex mesh decomposition is performed. This + results in a set of convex mesh colliders. + "convexHull" - A convex hull of the mesh is generated and used as the + collider. + "boundingSphere" - A bounding sphere is computed around the mesh and used + as a collider. + "boundingCube" - An optimally fitting box collider is computed around the + mesh. + "meshSimplification" - A mesh simplification step is performed, resulting + in a simplified triangle mesh collider.""" + ) +} + +class "PhysicsMaterialAPI" +( + customData = { + string className = "MaterialAPI" + } + doc = """ Adds simulation material properties to a Material. 
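Colliders per the CollisionAPI/MeshCollisionAPI above might be authored like this (sketch; the mesh is a placeholder with no geometry):

from pxr import Usd, UsdGeom, UsdPhysics

stage = Usd.Stage.CreateInMemory()
ground = UsdGeom.Mesh.Define(stage, "/World/ground")   # placeholder mesh prim

col = UsdPhysics.CollisionAPI.Apply(ground.GetPrim())
col.CreateCollisionEnabledAttr(True)

# MeshCollisionAPI is only meaningful on a Mesh that also carries CollisionAPI.
meshCol = UsdPhysics.MeshCollisionAPI.Apply(ground.GetPrim())
meshCol.CreateApproximationAttr(UsdPhysics.Tokens.convexHull)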
All collisions + that have a relationship to this material will have their collision response + defined through this material.""" + + inherits = +) +{ + float physics:dynamicFriction = 0.0 ( + customData = { + string apiName = "dynamicFriction" + } + displayName = "Dynamic Friction" + doc = """Dynamic friction coefficient. Unitless.""" + ) + + float physics:staticFriction = 0.0 ( + customData = { + string apiName = "staticFriction" + } + displayName = "Static Friction" + doc = """Static friction coefficient. Unitless.""" + ) + + float physics:restitution = 0.0 ( + customData = { + string apiName = "restitution" + } + displayName = "Restitution" + doc = """Restitution coefficient. Unitless.""" + ) + + float physics:density = 0.0 ( + customData = { + string apiName = "density" + } + displayName = "Density" + doc = """If non-zero, defines the density of the material. This can be + used for body mass computation, see PhysicsMassAPI. + Note that if the density is 0.0 it is ignored. + Units: mass/distance/distance/distance.""" + ) +} + +class PhysicsCollisionGroup "PhysicsCollisionGroup" +( + customData = { + string className = "CollisionGroup" + string extraIncludes = """ +#include "pxr/usd/usd/collectionAPI.h" """ + } + doc = """Defines a collision group for coarse filtering. When a collision + occurs between two objects that have a PhysicsCollisionGroup assigned, + they will collide with each other unless this PhysicsCollisionGroup pair + is filtered. See filteredGroups attribute. + + A CollectionAPI:colliders maintains a list of PhysicsCollisionAPI rel-s that + defines the members of this Collisiongroup. + """ + + inherits = + prepend apiSchemas = ["CollectionAPI:colliders"] +) +{ + rel physics:filteredGroups ( + customData = { + string apiName = "filteredGroups" + } + + displayName = "Filtered Groups" + doc = """References a list of PhysicsCollisionGroups with which + collisions should be ignored.""" + ) + + string physics:mergeGroup ( + customData = { + string apiName = "mergeGroupName" + } + + displayName = "Merge With Groups" + doc = """If non-empty, any collision groups in a stage with a matching + mergeGroup should be considered to refer to the same collection. Matching + collision groups should behave as if there were a single group containing + referenced colliders and filter groups from both collections.""" + ) + + bool physics:invertFilteredGroups ( + customData = { + string apiName = "invertFilteredGroups" + } + + displayName = "Invert Filtered Groups" + doc = """Normally, the filter will disable collisions against the selected + filter groups. However, if this option is set, the filter will disable + collisions against all colliders except for those in the selected filter + groups.""" + ) +} + +class "PhysicsFilteredPairsAPI" +( + customData = { + string className = "FilteredPairsAPI" + } + doc = """API to describe fine-grained filtering. If a collision between + two objects occurs, this pair might be filtered if the pair is defined + through this API. This API can be applied either to a body or collision + or even articulation. The "filteredPairs" defines what objects it should + not collide against. 
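A sketch of a physics material per the MaterialAPI attributes above; colliders would then reference it through the usual material-binding mechanism (conventionally with a "physics" binding purpose). Values are illustrative:

from pxr import Usd, UsdShade, UsdPhysics

stage = Usd.Stage.CreateInMemory()
mat = UsdShade.Material.Define(stage, "/Looks/rubber")

physMat = UsdPhysics.MaterialAPI.Apply(mat.GetPrim())
physMat.CreateStaticFrictionAttr(0.8)
physMat.CreateDynamicFrictionAttr(0.6)
physMat.CreateRestitutionAttr(0.3)
physMat.CreateDensityAttr(1000.0)   # mass/distance^3; 0.0 (default) = ignored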
Note that FilteredPairsAPI filtering has precedence + over CollisionGroup filtering.""" + + inherits = +) +{ + rel physics:filteredPairs ( + customData = { + string apiName = "filteredPairs" + } + displayName = "Filtered Pairs" + doc = """Relationship to objects that should be filtered.""" + ) +} + + +class PhysicsJoint "PhysicsJoint" +( + customData = { + string className = "Joint" + } + doc = """A joint constrains the movement of rigid bodies. Joint can be + created between two rigid bodies or between one rigid body and world. + By default joint primitive defines a D6 joint where all degrees of + freedom are free. Three linear and three angular degrees of freedom. + Note that default behavior is to disable collision between jointed bodies. + """ + + inherits = +) +{ + rel physics:body0 ( + customData = { + string apiName = "body0" + } + displayName = "Body 0" + doc = """Relationship to any UsdGeomXformable.""" + ) + + rel physics:body1 ( + customData = { + string apiName = "body1" + } + displayName = "Body 1" + doc = """Relationship to any UsdGeomXformable.""" + ) + + point3f physics:localPos0 = (0.0, 0.0, 0.0) ( + customData = { + string apiName = "localPos0" + } + displayName = "Local Position 0" + doc = """Relative position of the joint frame to body0's frame.""" + ) + + quatf physics:localRot0 = (1.0, 0.0, 0.0, 0.0) ( + customData = { + string apiName = "localRot0" + } + displayName = "Local Rotation 0" + doc = """Relative orientation of the joint frame to body0's frame.""" + ) + + point3f physics:localPos1 = (0.0, 0.0, 0.0) ( + customData = { + string apiName = "localPos1" + } + displayName = "Local Position 1" + doc = """Relative position of the joint frame to body1's frame.""" + ) + + quatf physics:localRot1 = (1.0, 0.0, 0.0, 0.0) ( + customData = { + string apiName = "localRot1" + } + displayName = "Local Rotation 1" + doc = """Relative orientation of the joint frame to body1's frame.""" + ) + + bool physics:jointEnabled = true ( + customData = { + string apiName = "jointEnabled" + } + displayName = "Joint Enabled" + doc = """Determines if the joint is enabled.""" + ) + + bool physics:collisionEnabled = false ( + customData = { + string apiName = "collisionEnabled" + } + displayName = "Collision Enabled" + doc = """Determines if the jointed subtrees should collide or not.""" + ) + + uniform bool physics:excludeFromArticulation = false ( + customData = { + string apiName = "excludeFromArticulation" + } + displayName = "Exclude From Articulation" + doc = """Determines if the joint can be included in an Articulation.""" + ) + + float physics:breakForce = inf ( + customData = { + string apiName = "breakForce" + } + displayName = "Break Force" + doc = """Joint break force. If set, joint is to break when this force + limit is reached. (Used for linear DOFs.) + Units: mass * distance / second / second""" + ) + + float physics:breakTorque = inf ( + customData = { + string apiName = "breakTorque" + } + displayName = "Break Torque" + doc = """Joint break torque. If set, joint is to break when this torque + limit is reached. (Used for angular DOFs.) 
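Fine-grained pair filtering as described above, sketched in Python (paths illustrative):

from pxr import Usd, UsdGeom, UsdPhysics

stage = Usd.Stage.CreateInMemory()
a = UsdGeom.Cube.Define(stage, "/World/boxA").GetPrim()
b = UsdGeom.Cube.Define(stage, "/World/boxB").GetPrim()
for prim in (a, b):
    UsdPhysics.RigidBodyAPI.Apply(prim)
    UsdPhysics.CollisionAPI.Apply(prim)

# Disable collisions between boxA and boxB only; this wins over CollisionGroup filtering.
pairs = UsdPhysics.FilteredPairsAPI.Apply(a)
pairs.CreateFilteredPairsRel().AddTarget(b.GetPath())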
+ Units: mass * distance * distance / second / second""" + ) + +} + +class PhysicsRevoluteJoint "PhysicsRevoluteJoint" +( + customData = { + string className = "RevoluteJoint" + } + doc = """Predefined revolute joint type (rotation along revolute joint + axis is permitted.)""" + + inherits = +) +{ + uniform token physics:axis = "X" ( + customData = { + string apiName = "axis" + } + allowedTokens = ["X", "Y", "Z"] + displayName = "Axis" + doc = """Joint axis.""" + ) + + float physics:lowerLimit = -inf ( + customData = { + string apiName = "lowerLimit" + } + displayName = "Lower Limit" + doc = """Lower limit. Units: degrees. -inf means not limited in + negative direction.""" + ) + + float physics:upperLimit = inf ( + customData = { + string apiName = "upperLimit" + } + displayName = "Upper Limit" + doc = """Upper limit. Units: degrees. inf means not limited in + positive direction.""" + ) +} + +class PhysicsPrismaticJoint "PhysicsPrismaticJoint" +( + customData = { + string className = "PrismaticJoint" + } + doc = """Predefined prismatic joint type (translation along prismatic + joint axis is permitted.)""" + + inherits = +) +{ + uniform token physics:axis = "X" ( + customData = { + string apiName = "axis" + } + displayName = "Axis" + allowedTokens = ["X", "Y", "Z"] + doc = """Joint axis.""" + ) + + float physics:lowerLimit = -inf ( + customData = { + string apiName = "lowerLimit" + } + displayName = "Lower Limit" + doc = """Lower limit. Units: distance. -inf means not limited in + negative direction.""" + ) + + float physics:upperLimit = inf ( + customData = { + string apiName = "upperLimit" + } + displayName = "Upper Limit" + doc = """Upper limit. Units: distance. inf means not limited in + positive direction.""" + ) +} + +class PhysicsSphericalJoint "PhysicsSphericalJoint" +( + customData = { + string className = "SphericalJoint" + } + doc = """Predefined spherical joint type (Removes linear degrees of + freedom, cone limit may restrict the motion in a given range.) It allows + two limit values, which when equal create a circular, else an elliptic + cone limit around the limit axis.""" + + inherits = +) +{ + uniform token physics:axis = "X" ( + customData = { + string apiName = "axis" + } + allowedTokens = ["X", "Y", "Z"] + displayName = "Axis" + doc = """Cone limit axis.""" + ) + + float physics:coneAngle0Limit = -1.0 ( + customData = { + string apiName = "coneAngle0Limit" + } + displayName = "Cone Angle0 Limit" + doc = """Cone limit from the primary joint axis in the local0 frame + toward the next axis. (Next axis of X is Y, and of Z is X.) A + negative value means not limited. Units: degrees.""" + ) + + float physics:coneAngle1Limit = -1.0 ( + customData = { + string apiName = "coneAngle1Limit" + } + displayName = "Cone Angle1 Limit" + doc = """Cone limit from the primary joint axis in the local0 frame + toward the second to next axis. A negative value means not limited. + Units: degrees.""" + ) +} + + +class PhysicsDistanceJoint "PhysicsDistanceJoint" +( + customData = { + string className = "DistanceJoint" + } + doc = """Predefined distance joint type (Distance between rigid bodies + may be limited to given minimum or maximum distance.)""" + + inherits = +) +{ + float physics:minDistance = -1.0 ( + customData = { + string apiName = "minDistance" + } + displayName = "Minimum Distance" + doc = """Minimum distance. If attribute is negative, the joint is not + limited. 
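A revolute joint per the attributes above, connecting two bodies and limiting rotation (sketch, illustrative values):

from pxr import Usd, UsdGeom, UsdPhysics, Gf

stage = Usd.Stage.CreateInMemory()
base = UsdGeom.Cube.Define(stage, "/World/base").GetPrim()
arm = UsdGeom.Cube.Define(stage, "/World/arm").GetPrim()
for prim in (base, arm):
    UsdPhysics.RigidBodyAPI.Apply(prim)
    UsdPhysics.CollisionAPI.Apply(prim)

hinge = UsdPhysics.RevoluteJoint.Define(stage, "/World/hinge")
hinge.CreateBody0Rel().SetTargets([base.GetPath()])
hinge.CreateBody1Rel().SetTargets([arm.GetPath()])
hinge.CreateLocalPos0Attr(Gf.Vec3f(0.0, 0.0, 0.5))    # joint frame relative to body0
hinge.CreateLocalPos1Attr(Gf.Vec3f(0.0, 0.0, -0.5))   # joint frame relative to body1
hinge.CreateAxisAttr("X")
hinge.CreateLowerLimitAttr(-45.0)   # degrees; -inf would mean unlimited
hinge.CreateUpperLimitAttr(45.0)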
Units: distance.""" + ) + + float physics:maxDistance = -1.0 ( + customData = { + string apiName = "maxDistance" + } + displayName = "Maximum Distance" + doc = """Maximum distance. If attribute is negative, the joint is not + limited. Units: distance.""" + ) +} + +class PhysicsFixedJoint "PhysicsFixedJoint" +( + customData = { + string className = "FixedJoint" + } + doc = """Predefined fixed joint type (All degrees of freedom are + removed.)""" + + inherits = +) +{ +} + +class "PhysicsLimitAPI" +( + customData = { + string className = "LimitAPI" + token apiSchemaType = "multipleApply" + token propertyNamespacePrefix = "limit" + } + doc = """The PhysicsLimitAPI can be applied to a PhysicsJoint and will + restrict the movement along an axis. PhysicsLimitAPI is a multipleApply + schema: The PhysicsJoint can be restricted along "transX", "transY", + "transZ", "rotX", "rotY", "rotZ", "distance". Setting these as a + multipleApply schema TfToken name will define the degree of freedom the + PhysicsLimitAPI is applied to. Note that if the low limit is higher than + the high limit, motion along this axis is considered locked.""" + + inherits = + +) +{ + float physics:low = -inf ( + customData = { + string apiName = "low" + } + displayName = "Low Limit" + doc = """Lower limit. Units: degrees or distance depending on trans or + rot axis applied to. -inf means not limited in negative direction.""" + ) + + float physics:high = inf ( + customData = { + string apiName = "high" + } + displayName = "High Limit" + doc = """Upper limit. Units: degrees or distance depending on trans or + rot axis applied to. inf means not limited in positive direction.""" + ) +} + +class "PhysicsDriveAPI" +( + customData = { + string className = "DriveAPI" + token apiSchemaType = "multipleApply" + token propertyNamespacePrefix = "drive" + } + doc = """The PhysicsDriveAPI when applied to any joint primitive will drive + the joint towards a given target. The PhysicsDriveAPI is a multipleApply + schema: drive can be set per axis "transX", "transY", "transZ", "rotX", + "rotY", "rotZ" or its "linear" for prismatic joint or "angular" for revolute + joints. Setting these as a multipleApply schema TfToken name will + define the degree of freedom the DriveAPI is applied to. Each drive is an + implicit force-limited damped spring: + Force or acceleration = stiffness * (targetPosition - position) + + damping * (targetVelocity - velocity)""" + + inherits = +) +{ + uniform token physics:type = "force" ( + customData = { + string apiName = "type" + } + allowedTokens = ["force", "acceleration"] + displayName = "Type" + doc = """Drive spring is for the acceleration at the joint (rather + than the force).""" + ) + + float physics:maxForce = inf ( + customData = { + string apiName = "maxForce" + } + displayName = "Max Force" + doc = """Maximum force that can be applied to drive. Units: + if linear drive: mass*DIST_UNITS/second/second + if angular drive: mass*DIST_UNITS*DIST_UNITS/second/second + inf means not limited. Must be non-negative. + """ + ) + + float physics:targetPosition = 0.0 ( + customData = { + string apiName = "targetPosition" + } + displayName = "Target Position" + doc = """Target value for position. Units: + if linear drive: distance + if angular drive: degrees.""" + ) + + float physics:targetVelocity = 0.0 ( + customData = { + string apiName = "targetVelocity" + } + displayName = "Target Velocity" + doc = """Target value for velocity. 
Units: + if linear drive: distance/second + if angular drive: degrees/second.""" + ) + + float physics:damping = 0.0 ( + customData = { + string apiName = "damping" + } + doc = """Damping of the drive. Units: + if linear drive: mass/second + If angular drive: mass*DIST_UNITS*DIST_UNITS/second/second/degrees.""" + ) + + float physics:stiffness = 0.0 ( + customData = { + string apiName = "stiffness" + } + doc = """Stiffness of the drive. Units: + if linear drive: mass/second/second + if angular drive: mass*DIST_UNITS*DIST_UNITS/degree/second/second.""" + ) +} + +class "PhysicsArticulationRootAPI" +( + customData = { + string className = "ArticulationRootAPI" + } + doc = """PhysicsArticulationRootAPI can be applied to a scene graph node, + and marks the subtree rooted here for inclusion in one or more reduced + coordinate articulations. For floating articulations, this should be on + the root body. For fixed articulations (robotics jargon for e.g. a robot + arm for welding that is bolted to the floor), this API can be on a direct + or indirect parent of the root joint which is connected to the world, or + on the joint itself..""" + + inherits = +) +{ +} + diff --git a/blender/lib/usd/usdProc/resources/generatedSchema.usda b/blender/lib/usd/usdProc/resources/generatedSchema.usda new file mode 100644 index 0000000..419dc32 --- /dev/null +++ b/blender/lib/usd/usdProc/resources/generatedSchema.usda @@ -0,0 +1,97 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class GenerativeProcedural "GenerativeProcedural" ( + doc = """ + Represents an abstract generative procedural prim which delivers its input + parameters via properties (including relationships) within the \"primvars:\" + namespace. + + It does not itself have any awareness or participation in the execution of + the procedural but rather serves as a means of delivering a procedural's + definition and input parameters. + + The value of its \"proceduralSystem\" property (either authored or provided + by API schema fallback) indicates to which system the procedural definition + is meaningful. + """ +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + token proceduralSystem ( + doc = """The name or convention of the system responsible for evaluating + the procedural. + NOTE: A fallback value for this is typically set via an API schema.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
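For the usdProc schema introduced here, a hedged sketch of authoring a GenerativeProcedural whose inputs travel as primvars. This assumes the UsdProc Python module is built in this distribution; the proceduralSystem value and primvar names are placeholders:

from pxr import Usd, UsdGeom, UsdProc, Sdf

stage = Usd.Stage.CreateInMemory()
proc = UsdProc.GenerativeProcedural.Define(stage, "/World/scatterProc")
proc.CreateProceduralSystemAttr("hydraGenerativeProcedural")   # consumer-defined system name (placeholder)

# Input parameters are delivered to the procedural as primvars on this prim.
pv = UsdGeom.PrimvarsAPI(proc.GetPrim())
pv.CreatePrimvar("count", Sdf.ValueTypeNames.Int).Set(1000)
pv.CreatePrimvar("seed", Sdf.ValueTypeNames.Float).Set(7.0)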
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + diff --git a/blender/lib/usd/usdProc/resources/plugInfo.json b/blender/lib/usd/usdProc/resources/plugInfo.json new file mode 100644 index 0000000..97110e4 --- /dev/null +++ b/blender/lib/usd/usdProc/resources/plugInfo.json @@ -0,0 +1,28 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdProcGenerativeProcedural": { + "alias": { + "UsdSchemaBase": "GenerativeProcedural" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomBoundable" + ], + "schemaKind": "concreteTyped" + } + } + }, + "LibraryPath": "", + "Name": "usdProc", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdProc/resources/usdProc/schema.usda b/blender/lib/usd/usdProc/resources/usdProc/schema.usda new file mode 100644 index 0000000..28ba4e4 --- /dev/null +++ b/blender/lib/usd/usdProc/resources/usdProc/schema.usda @@ -0,0 +1,45 @@ +#usda 1.0 +( + "This file describes the Pixar-specific USD Geometric schemata for code generation." 
+ subLayers = [ + @usdGeom/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdProc" + string libraryPath = "pxr/usd/usdProc" + dictionary libraryTokens = { + + } + } +){ +} + +class GenerativeProcedural "GenerativeProcedural" ( + inherits = + doc = """ + Represents an abstract generative procedural prim which delivers its input + parameters via properties (including relationships) within the "primvars:" + namespace. + + It does not itself have any awareness or participation in the execution of + the procedural but rather serves as a means of delivering a procedural's + definition and input parameters. + + The value of its "proceduralSystem" property (either authored or provided + by API schema fallback) indicates to which system the procedural definition + is meaningful. + """ + customData = { + + } +){ + + token proceduralSystem ( + doc = """The name or convention of the system responsible for evaluating + the procedural. + NOTE: A fallback value for this is typically set via an API schema.""" + ) +} diff --git a/blender/lib/usd/usdProcImaging/resources/plugInfo.json b/blender/lib/usd/usdProcImaging/resources/plugInfo.json new file mode 100644 index 0000000..5320b49 --- /dev/null +++ b/blender/lib/usd/usdProcImaging/resources/plugInfo.json @@ -0,0 +1,22 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + + "UsdProcImagingGenerativeProceduralAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "primTypeName": "GenerativeProcedural" + } + } + }, + "LibraryPath": "", + "Name": "usdProcImaging", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdRender/resources/generatedSchema.usda b/blender/lib/usd/usdRender/resources/generatedSchema.usda new file mode 100644 index 0000000..a0e574e --- /dev/null +++ b/blender/lib/usd/usdRender/resources/generatedSchema.usda @@ -0,0 +1,504 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class "RenderSettingsBase" ( + doc = """Abstract base class that defines render settings that + can be specified on either a RenderSettings prim or a RenderProduct + prim.""" +) +{ + uniform token aspectRatioConformPolicy = "expandAperture" ( + allowedTokens = ["expandAperture", "cropAperture", "adjustApertureWidth", "adjustApertureHeight", "adjustPixelAspectRatio"] + doc = '''Indicates the policy to use to resolve an aspect + ratio mismatch between the camera aperture and image settings. + + This policy allows a standard render setting to do something + reasonable given varying camera inputs. + + The camera aperture aspect ratio is determined by the + aperture atributes on the UsdGeomCamera. + + The image aspect ratio is determined by the resolution and + pixelAspectRatio attributes in the render settings. + + - "expandAperture": if necessary, expand the aperture to + fit the image, exposing additional scene content + - "cropAperture": if necessary, crop the aperture to fit + the image, cropping scene content + - "adjustApertureWidth": if necessary, adjust aperture width + to make its aspect ratio match the image + - "adjustApertureHeight": if necessary, adjust aperture height + to make its aspect ratio match the image + - "adjustPixelAspectRatio": compute pixelAspectRatio to + make the image exactly cover the aperture; disregards + existing attribute value of pixelAspectRatio + ''' + ) + rel camera ( + doc = """The _camera_ relationship specifies the primary + camera to use in a render. 
It must target a UsdGeomCamera.""" + ) + uniform float4 dataWindowNDC = (0, 0, 1, 1) ( + doc = """dataWindowNDC specifies the axis-aligned rectangular + region in the adjusted aperture window within which the renderer + should produce data. + + It is specified as (xmin, ymin, xmax, ymax) in normalized + device coordinates, where the range 0 to 1 corresponds to the + aperture. (0,0) corresponds to the bottom-left + corner and (1,1) corresponds to the upper-right corner. + + Specifying a window outside the unit square will produce + overscan data. Specifying a window that does not cover the unit + square will produce a cropped render. + + A pixel is included in the rendered result if the pixel + center is contained by the data window. This is consistent + with standard rules used by polygon rasterization engines. + \\ref UsdRenderRasterization + + The data window is expressed in NDC so that cropping and + overscan may be resolution independent. In interactive + workflows, incremental cropping and resolution adjustment + may be intermixed to isolate and examine parts of the scene. + In compositing workflows, overscan may be used to support + image post-processing kernels, and reduced-resolution proxy + renders may be used for faster iteration. + + The dataWindow:ndc coordinate system references the + aperture after any adjustments required by + aspectRatioConformPolicy. + """ + ) + uniform bool disableMotionBlur = 0 ( + doc = """Disable all motion blur by setting the shutter interval + of the targeted camera to [0,0] - that is, take only one sample, + namely at the current time code.""" + ) + uniform bool instantaneousShutter = 0 ( + doc = """Deprecated - use disableMotionBlur instead. Override + the targeted _camera_'s _shutterClose_ to be equal to the + value of its _shutterOpen_, to produce a zero-width shutter + interval. This gives us a convenient way to disable motion + blur.""" + ) + uniform float pixelAspectRatio = 1 ( + doc = """The aspect ratio (width/height) of image pixels.. + The default ratio 1.0 indicates square pixels.""" + ) + uniform int2 resolution = (2048, 1080) ( + doc = """The image pixel resolution, corresponding to the + camera's screen window.""" + ) +} + +class RenderSettings "RenderSettings" ( + doc = """A UsdRenderSettings prim specifies global settings for + a render process, including an enumeration of the RenderProducts + that should result, and the UsdGeomImageable purposes that should + be rendered. \\ref UsdRenderHowSettingsAffectRendering""" +) +{ + uniform token aspectRatioConformPolicy = "expandAperture" ( + allowedTokens = ["expandAperture", "cropAperture", "adjustApertureWidth", "adjustApertureHeight", "adjustPixelAspectRatio"] + doc = '''Indicates the policy to use to resolve an aspect + ratio mismatch between the camera aperture and image settings. + + This policy allows a standard render setting to do something + reasonable given varying camera inputs. + + The camera aperture aspect ratio is determined by the + aperture atributes on the UsdGeomCamera. + + The image aspect ratio is determined by the resolution and + pixelAspectRatio attributes in the render settings. 
+ + - "expandAperture": if necessary, expand the aperture to + fit the image, exposing additional scene content + - "cropAperture": if necessary, crop the aperture to fit + the image, cropping scene content + - "adjustApertureWidth": if necessary, adjust aperture width + to make its aspect ratio match the image + - "adjustApertureHeight": if necessary, adjust aperture height + to make its aspect ratio match the image + - "adjustPixelAspectRatio": compute pixelAspectRatio to + make the image exactly cover the aperture; disregards + existing attribute value of pixelAspectRatio + ''' + ) + rel camera ( + doc = """The _camera_ relationship specifies the primary + camera to use in a render. It must target a UsdGeomCamera.""" + ) + uniform float4 dataWindowNDC = (0, 0, 1, 1) ( + doc = """dataWindowNDC specifies the axis-aligned rectangular + region in the adjusted aperture window within which the renderer + should produce data. + + It is specified as (xmin, ymin, xmax, ymax) in normalized + device coordinates, where the range 0 to 1 corresponds to the + aperture. (0,0) corresponds to the bottom-left + corner and (1,1) corresponds to the upper-right corner. + + Specifying a window outside the unit square will produce + overscan data. Specifying a window that does not cover the unit + square will produce a cropped render. + + A pixel is included in the rendered result if the pixel + center is contained by the data window. This is consistent + with standard rules used by polygon rasterization engines. + \\ref UsdRenderRasterization + + The data window is expressed in NDC so that cropping and + overscan may be resolution independent. In interactive + workflows, incremental cropping and resolution adjustment + may be intermixed to isolate and examine parts of the scene. + In compositing workflows, overscan may be used to support + image post-processing kernels, and reduced-resolution proxy + renders may be used for faster iteration. + + The dataWindow:ndc coordinate system references the + aperture after any adjustments required by + aspectRatioConformPolicy. + """ + ) + uniform bool disableMotionBlur = 0 ( + doc = """Disable all motion blur by setting the shutter interval + of the targeted camera to [0,0] - that is, take only one sample, + namely at the current time code.""" + ) + uniform token[] includedPurposes = ["default", "render"] ( + doc = """The list of UsdGeomImageable _purpose_ values that + should be included in the render. Note this cannot be + specified per-RenderProduct because it is a statement of + which geometry is present.""" + ) + uniform bool instantaneousShutter = 0 ( + doc = """Deprecated - use disableMotionBlur instead. Override + the targeted _camera_'s _shutterClose_ to be equal to the + value of its _shutterOpen_, to produce a zero-width shutter + interval. This gives us a convenient way to disable motion + blur.""" + ) + uniform token[] materialBindingPurposes = ["full", ""] ( + allowedTokens = ["full", "preview", ""] + doc = '''Ordered list of material purposes to consider when + resolving material bindings in the scene. The empty string + indicates the "allPurpose" binding.''' + ) + uniform float pixelAspectRatio = 1 ( + doc = """The aspect ratio (width/height) of image pixels.. + The default ratio 1.0 indicates square pixels.""" + ) + rel products ( + doc = """The set of RenderProducts the render should produce. + This relationship should target UsdRenderProduct prims. 
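A sketch of a RenderSettings prim wired to a camera, per the attributes above (paths and values illustrative):

from pxr import Usd, UsdGeom, UsdRender, Gf

stage = Usd.Stage.CreateInMemory()
cam = UsdGeom.Camera.Define(stage, "/World/renderCam")

settings = UsdRender.Settings.Define(stage, "/Render/Settings")
settings.CreateCameraRel().SetTargets([cam.GetPrim().GetPath()])
settings.CreateResolutionAttr(Gf.Vec2i(1920, 1080))
settings.CreatePixelAspectRatioAttr(1.0)
settings.CreateAspectRatioConformPolicyAttr("expandAperture")
settings.CreateIncludedPurposesAttr(["default", "render"])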
+ If no _products_ are specified, an application should produce + an rgb image according to the RenderSettings configuration, + to a default display or image name.""" + ) + uniform token renderingColorSpace ( + doc = """Describes a renderer's working (linear) colorSpace where all + the renderer/shader math is expected to happen. When no + renderingColorSpace is provided, renderer should use its own default.""" + ) + uniform int2 resolution = (2048, 1080) ( + doc = """The image pixel resolution, corresponding to the + camera's screen window.""" + ) +} + +class RenderVar "RenderVar" ( + doc = """A UsdRenderVar describes a custom data variable for + a render to produce. The prim describes the source of the data, which + can be a shader output or an LPE (Light Path Expression), and also + allows encoding of (generally renderer-specific) parameters that + configure the renderer for computing the variable. + + \\note The name of the RenderVar prim drives the name of the data + variable that the renderer will produce. + + \\note In the future, UsdRender may standardize RenderVar representation + for well-known variables under the sourceType `intrinsic`, + such as _r_, _g_, _b_, _a_, _z_, or _id_. + """ +) +{ + uniform token dataType = "color3f" ( + doc = "The type of this channel, as a USD attribute type." + ) + uniform string sourceName = "" ( + doc = """The renderer should look for an output of this name + as the computed value for the RenderVar.""" + ) + uniform token sourceType = "raw" ( + allowedTokens = ["raw", "primvar", "lpe", "intrinsic"] + doc = '''Indicates the type of the source. + + - "raw": The name should be passed directly to the + renderer. This is the default behavior. + - "primvar": This source represents the name of a primvar. + Some renderers may use this to ensure that the primvar + is provided; other renderers may require that a suitable + material network be provided, in which case this is simply + an advisory setting. + - "lpe": Specifies a Light Path Expression in the + [OSL Light Path Expressions language](https://github.com/imageworks/OpenShadingLanguage/wiki/OSL-Light-Path-Expressions) as the source for + this RenderVar. Some renderers may use extensions to + that syntax, which will necessarily be non-portable. + - "intrinsic": This setting is currently unimplemented, + but represents a future namespace for UsdRender to provide + portable baseline RenderVars, such as camera depth, that + may have varying implementations for each renderer. + ''' + ) +} + +class RenderProduct "RenderProduct" ( + doc = """A UsdRenderProduct describes an image or other + file-like artifact produced by a render. A RenderProduct + combines one or more RenderVars into a file or interactive + buffer. It also provides all the controls established in + UsdRenderSettingsBase as optional overrides to whatever the + owning UsdRenderSettings prim dictates. + + Specific renderers may support additional settings, such + as a way to configure compression settings, filetype metadata, + and so forth. Such settings can be encoded using + renderer-specific API schemas applied to the product prim. + """ +) +{ + uniform token aspectRatioConformPolicy = "expandAperture" ( + allowedTokens = ["expandAperture", "cropAperture", "adjustApertureWidth", "adjustApertureHeight", "adjustPixelAspectRatio"] + doc = '''Indicates the policy to use to resolve an aspect + ratio mismatch between the camera aperture and image settings. 
+ + This policy allows a standard render setting to do something + reasonable given varying camera inputs. + + The camera aperture aspect ratio is determined by the + aperture atributes on the UsdGeomCamera. + + The image aspect ratio is determined by the resolution and + pixelAspectRatio attributes in the render settings. + + - "expandAperture": if necessary, expand the aperture to + fit the image, exposing additional scene content + - "cropAperture": if necessary, crop the aperture to fit + the image, cropping scene content + - "adjustApertureWidth": if necessary, adjust aperture width + to make its aspect ratio match the image + - "adjustApertureHeight": if necessary, adjust aperture height + to make its aspect ratio match the image + - "adjustPixelAspectRatio": compute pixelAspectRatio to + make the image exactly cover the aperture; disregards + existing attribute value of pixelAspectRatio + ''' + ) + rel camera ( + doc = """The _camera_ relationship specifies the primary + camera to use in a render. It must target a UsdGeomCamera.""" + ) + uniform float4 dataWindowNDC = (0, 0, 1, 1) ( + doc = """dataWindowNDC specifies the axis-aligned rectangular + region in the adjusted aperture window within which the renderer + should produce data. + + It is specified as (xmin, ymin, xmax, ymax) in normalized + device coordinates, where the range 0 to 1 corresponds to the + aperture. (0,0) corresponds to the bottom-left + corner and (1,1) corresponds to the upper-right corner. + + Specifying a window outside the unit square will produce + overscan data. Specifying a window that does not cover the unit + square will produce a cropped render. + + A pixel is included in the rendered result if the pixel + center is contained by the data window. This is consistent + with standard rules used by polygon rasterization engines. + \\ref UsdRenderRasterization + + The data window is expressed in NDC so that cropping and + overscan may be resolution independent. In interactive + workflows, incremental cropping and resolution adjustment + may be intermixed to isolate and examine parts of the scene. + In compositing workflows, overscan may be used to support + image post-processing kernels, and reduced-resolution proxy + renders may be used for faster iteration. + + The dataWindow:ndc coordinate system references the + aperture after any adjustments required by + aspectRatioConformPolicy. + """ + ) + uniform bool disableMotionBlur = 0 ( + doc = """Disable all motion blur by setting the shutter interval + of the targeted camera to [0,0] - that is, take only one sample, + namely at the current time code.""" + ) + uniform bool instantaneousShutter = 0 ( + doc = """Deprecated - use disableMotionBlur instead. Override + the targeted _camera_'s _shutterClose_ to be equal to the + value of its _shutterOpen_, to produce a zero-width shutter + interval. This gives us a convenient way to disable motion + blur.""" + ) + rel orderedVars ( + doc = """Specifies the RenderVars that should be consumed and + combined into the final product. If ordering is relevant to the + output driver, then the ordering of targets in this relationship + provides the order to use.""" + ) + uniform float pixelAspectRatio = 1 ( + doc = """The aspect ratio (width/height) of image pixels.. + The default ratio 1.0 indicates square pixels.""" + ) + token productName = "" ( + doc = """Specifies the name that the output/display driver + should give the product. 
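RenderVar and RenderProduct combine as sketched below; the source name and file name are illustrative, and real naming is renderer- and pipeline-specific:

from pxr import Usd, UsdRender

stage = Usd.Stage.CreateInMemory()

var = UsdRender.Var.Define(stage, "/Render/Vars/beauty")
var.CreateSourceNameAttr("Ci")        # renderer output name (illustrative)
var.CreateSourceTypeAttr("raw")
var.CreateDataTypeAttr("color3f")

product = UsdRender.Product.Define(stage, "/Render/Products/beauty")
product.CreateProductTypeAttr("raster")
product.CreateProductNameAttr("beauty.0001.exr")   # handed as-authored to the output driver
product.CreateOrderedVarsRel().SetTargets([var.GetPrim().GetPath()])

settings = UsdRender.Settings.Define(stage, "/Render/Settings")
settings.CreateProductsRel().SetTargets([product.GetPrim().GetPath()])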
This is provided as-authored to the + driver, whose responsibility it is to situate the product on a + filesystem or other storage, in the desired location.""" + ) + uniform token productType = "raster" ( + doc = '''The type of output to produce. + The default, "raster", indicates a 2D image. + + \\note In the future, UsdRender may define additional product + types.''' + ) + uniform int2 resolution = (2048, 1080) ( + doc = """The image pixel resolution, corresponding to the + camera's screen window.""" + ) +} + +class RenderPass "RenderPass" ( + apiSchemas = ["CollectionAPI:renderVisibility"] + customData = { + token[] apiSchemaOverridePropertyNames = ["collection:renderVisibility:includeRoot"] + } + doc = """A RenderPass prim encapsulates the necessary information + to generate multipass renders. It houses properties for generating + dependencies and the necessary commands to run to generate renders, as + well as visibility controls for the scene. While RenderSettings + describes the information needed to generate images from a single + invocation of a renderer, RenderPass describes the additional information + needed to generate a time varying set of images. + + There are two consumers of RenderPass prims - a runtime executable that + generates images from usdRender prims, and pipeline specific code that + translates between usdRender prims and the pipeline's resource scheduling + software. We'll refer to the latter as 'job submission code'. + + \\anchor usdRender_renderVisibility + The objects that are relevant to the render is specified via the + renderVisibility collection (UsdCollectionAPI) and can be accessed via + GetRenderVisibilityCollectionAPI(). This collection has includeRoot set to + true so that all objects participate in the render by default. To render + only a specific set of objects, there are two options. One is to modify the + collection paths to explicitly exclude objects that don't participate in + the render, assuming it is known; the other option is to set includeRoot to + false and explicitly include the desired objects. These are complementary + approaches that may each be preferable depending on the scenario. + + The name of the prim is used as the pass's name. + """ +) +{ + uniform bool collection:renderVisibility:includeRoot = 1 + uniform string[] command ( + doc = '''The command to run in order to generate + renders for this pass. The job submission code can use + this to properly send tasks to the job scheduling software + that will generate products. + + The command can contain variables that will be substituted + appropriately during submission, as seen in the example below + with {fileName}. + + For example: + command[0] = "prman" + command[1] = "-progress" + command[2] = "-pixelvariance" + command[3] = "-0.15" + command[4] = "{fileName}" # the fileName property will be substituted + ''' + ) + uniform bool denoise:enable = 0 ( + doc = "When True, this Pass pass should be denoised." + ) + rel denoise:pass ( + doc = """The The UsdRenderDenoisePass prim from which to + source denoise settings. + """ + ) + uniform asset fileName ( + doc = """The asset that contains the rendering prims or other + information needed to render this pass. + """ + ) + rel inputPasses ( + doc = """The set of other Passes that this Pass depends on + in order to be constructed properly. For example, a Pass A + may generate a texture, which is then used as an input to + Pass B. 
+ + By default, usdRender makes some assumptions about the + relationship between this prim and the prims listed in inputPasses. + Namely, when per-frame tasks are generated from these pass prims, + usdRender will assume a one-to-one relationship between tasks + that share their frame number. Consider a pass named 'composite' + whose _inputPasses_ targets a Pass prim named 'beauty`. + By default, each frame for 'composite' will depend on the + same frame from 'beauty': + beauty.1 -> composite.1 + beauty.2 -> composite.2 + etc + + The consumer of this RenderPass graph of inputs will need to resolve + the transitive dependencies. + """ + ) + uniform token passType ( + doc = """A string used to categorize differently structured + or executed types of passes within a customized pipeline. + + For example, when multiple DCC's (e.g. Houdini, Katana, Nuke) + each compute and contribute different Products to a final result, + it may be clearest and most flexible to create a separate + RenderPass for each. + """ + ) + rel renderSource ( + doc = """The source prim to render from. If _fileName_ is not present, + the source is assumed to be a RenderSettings prim present in the current + Usd stage. If fileName is present, the source should be found in the + file there. This relationship might target a string attribute on this + or another prim that identifies the appropriate object in the external + container. + + For example, for a Usd-backed pass, this would point to a RenderSettings + prim. Houdini passes would point to a Rop. Nuke passes would point to + a write node. + """ + ) +} + +class RenderDenoisePass "RenderDenoisePass" ( + doc = """A RenderDenoisePass generates renders via a denoising process. + This may be the same renderer that a pipeline uses for UsdRender, + or may be a separate one. Notably, a RenderDenoisePass requires + another Pass to be present for it to operate. The denoising process + itself is not generative, and requires images inputs to operate. + + As denoising integration varies so widely across pipelines, all + implementation details are left to pipeline-specific prims + that inherit from RenderDenoisePass. + """ +) +{ +} + diff --git a/blender/lib/usd/usdRender/resources/plugInfo.json b/blender/lib/usd/usdRender/resources/plugInfo.json new file mode 100644 index 0000000..e3ecb26 --- /dev/null +++ b/blender/lib/usd/usdRender/resources/plugInfo.json @@ -0,0 +1,85 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. 
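A hedged sketch of the RenderPass schema above, assuming this USD build exposes UsdRender.Pass; the command, file name, and pass type are placeholders:

from pxr import Usd, UsdRender, Sdf

stage = Usd.Stage.CreateInMemory()
rpass = UsdRender.Pass.Define(stage, "/Render/Passes/beauty")
rpass.CreatePassTypeAttr("prman")                                # pipeline-specific category (placeholder)
rpass.CreateCommandAttr(["prman", "-progress", "{fileName}"])    # {fileName} substituted at submission
rpass.CreateFileNameAttr(Sdf.AssetPath("shots/s010/beauty.usda"))  # placeholder asset
rpass.CreateRenderSourceRel().SetTargets([Sdf.Path("/Render/Settings")])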
+{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "renderSettingsPrimPath": { + "appliesTo": "layers", + "default": "", + "type": "string" + } + }, + "Types": { + "UsdRenderDenoisePass": { + "alias": { + "UsdSchemaBase": "RenderDenoisePass" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdRenderPass": { + "alias": { + "UsdSchemaBase": "RenderPass" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdRenderProduct": { + "alias": { + "UsdSchemaBase": "RenderProduct" + }, + "autoGenerated": true, + "bases": [ + "UsdRenderSettingsBase" + ], + "schemaKind": "concreteTyped" + }, + "UsdRenderSettings": { + "alias": { + "UsdSchemaBase": "RenderSettings" + }, + "autoGenerated": true, + "bases": [ + "UsdRenderSettingsBase" + ], + "schemaKind": "concreteTyped" + }, + "UsdRenderSettingsBase": { + "alias": { + "UsdSchemaBase": "RenderSettingsBase" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "abstractTyped" + }, + "UsdRenderVar": { + "alias": { + "UsdSchemaBase": "RenderVar" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + } + } + }, + "LibraryPath": "", + "Name": "usdRender", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdRender/resources/usdRender/schema.usda b/blender/lib/usd/usdRender/resources/usdRender/schema.usda new file mode 100644 index 0000000..90963d3 --- /dev/null +++ b/blender/lib/usd/usdRender/resources/usdRender/schema.usda @@ -0,0 +1,400 @@ +#usda 1.0 +( + "This file describes the USD Render schemata for code generation." + subLayers = [ + @usd/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdRender" + string libraryPath = "pxr/usd/usdRender" + dictionary libraryTokens = { + dictionary renderSettingsPrimPath = { + string doc = """Stage-level metadata that encodes the + path to UsdRenderSettingsPrim to use for rendering.""" + } + dictionary renderVisibility = { + string doc = """ + This token represents the collection name to use + with UsdCollectionAPI to represent renderVisibility + of a RenderPass prim. + """ + } + } + } +) +{ +} + +class "RenderSettingsBase" ( + inherits = + doc = """Abstract base class that defines render settings that + can be specified on either a RenderSettings prim or a RenderProduct + prim.""" + customData = { + string className = "SettingsBase" + } +) +{ + rel camera ( + doc = """The _camera_ relationship specifies the primary + camera to use in a render. It must target a UsdGeomCamera.""" + ) + uniform int2 resolution = (2048, 1080) ( + doc = """The image pixel resolution, corresponding to the + camera's screen window.""" + ) + uniform float pixelAspectRatio = 1.0 ( + doc = """The aspect ratio (width/height) of image pixels.. + The default ratio 1.0 indicates square pixels.""" + ) + uniform token aspectRatioConformPolicy = "expandAperture" ( + doc = """Indicates the policy to use to resolve an aspect + ratio mismatch between the camera aperture and image settings. + + This policy allows a standard render setting to do something + reasonable given varying camera inputs. + + The camera aperture aspect ratio is determined by the + aperture atributes on the UsdGeomCamera. + + The image aspect ratio is determined by the resolution and + pixelAspectRatio attributes in the render settings. 
+ + - "expandAperture": if necessary, expand the aperture to + fit the image, exposing additional scene content + - "cropAperture": if necessary, crop the aperture to fit + the image, cropping scene content + - "adjustApertureWidth": if necessary, adjust aperture width + to make its aspect ratio match the image + - "adjustApertureHeight": if necessary, adjust aperture height + to make its aspect ratio match the image + - "adjustPixelAspectRatio": compute pixelAspectRatio to + make the image exactly cover the aperture; disregards + existing attribute value of pixelAspectRatio + """ + allowedTokens = ["expandAperture", "cropAperture", "adjustApertureWidth", "adjustApertureHeight", "adjustPixelAspectRatio"] + ) + uniform float4 dataWindowNDC = (0.0, 0.0, 1.0, 1.0) ( + doc = """dataWindowNDC specifies the axis-aligned rectangular + region in the adjusted aperture window within which the renderer + should produce data. + + It is specified as (xmin, ymin, xmax, ymax) in normalized + device coordinates, where the range 0 to 1 corresponds to the + aperture. (0,0) corresponds to the bottom-left + corner and (1,1) corresponds to the upper-right corner. + + Specifying a window outside the unit square will produce + overscan data. Specifying a window that does not cover the unit + square will produce a cropped render. + + A pixel is included in the rendered result if the pixel + center is contained by the data window. This is consistent + with standard rules used by polygon rasterization engines. + \\ref UsdRenderRasterization + + The data window is expressed in NDC so that cropping and + overscan may be resolution independent. In interactive + workflows, incremental cropping and resolution adjustment + may be intermixed to isolate and examine parts of the scene. + In compositing workflows, overscan may be used to support + image post-processing kernels, and reduced-resolution proxy + renders may be used for faster iteration. + + The dataWindow:ndc coordinate system references the + aperture after any adjustments required by + aspectRatioConformPolicy. + """ + ) + uniform bool instantaneousShutter = false ( + doc = """Deprecated - use disableMotionBlur instead. Override + the targeted _camera_'s _shutterClose_ to be equal to the + value of its _shutterOpen_, to produce a zero-width shutter + interval. This gives us a convenient way to disable motion + blur.""" + ) + uniform bool disableMotionBlur = false ( + doc = """Disable all motion blur by setting the shutter interval + of the targeted camera to [0,0] - that is, take only one sample, + namely at the current time code.""" + ) +} + +class RenderSettings "RenderSettings" ( + inherits = + doc = """A UsdRenderSettings prim specifies global settings for + a render process, including an enumeration of the RenderProducts + that should result, and the UsdGeomImageable purposes that should + be rendered. \\ref UsdRenderHowSettingsAffectRendering""" + customData = { + string className = "Settings" + } +) +{ + rel products ( + doc = """The set of RenderProducts the render should produce. + This relationship should target UsdRenderProduct prims. + If no _products_ are specified, an application should produce + an rgb image according to the RenderSettings configuration, + to a default display or image name.""" + ) + uniform token[] includedPurposes = ["default", "render"] ( + doc = """The list of UsdGeomImageable _purpose_ values that + should be included in the render. 
Note this cannot be + specified per-RenderProduct because it is a statement of + which geometry is present.""" + ) + uniform token[] materialBindingPurposes = ["full", ""] ( + allowedTokens = ["full", "preview", ""] + doc = """Ordered list of material purposes to consider when + resolving material bindings in the scene. The empty string + indicates the "allPurpose" binding.""" + ) + uniform token renderingColorSpace ( + doc = """Describes a renderer's working (linear) colorSpace where all + the renderer/shader math is expected to happen. When no + renderingColorSpace is provided, renderer should use its own default.""" + ) +} + +class RenderVar "RenderVar" ( + inherits = + doc = """A UsdRenderVar describes a custom data variable for + a render to produce. The prim describes the source of the data, which + can be a shader output or an LPE (Light Path Expression), and also + allows encoding of (generally renderer-specific) parameters that + configure the renderer for computing the variable. + + \\note The name of the RenderVar prim drives the name of the data + variable that the renderer will produce. + + \\note In the future, UsdRender may standardize RenderVar representation + for well-known variables under the sourceType `intrinsic`, + such as _r_, _g_, _b_, _a_, _z_, or _id_. + """ + customData = { + string className = "Var" + } +) { + uniform token dataType = "color3f" ( + doc = """The type of this channel, as a USD attribute type.""" + ) + uniform string sourceName = "" ( + doc = """The renderer should look for an output of this name + as the computed value for the RenderVar.""" + ) + uniform token sourceType = "raw" ( + doc = """Indicates the type of the source. + + - "raw": The name should be passed directly to the + renderer. This is the default behavior. + - "primvar": This source represents the name of a primvar. + Some renderers may use this to ensure that the primvar + is provided; other renderers may require that a suitable + material network be provided, in which case this is simply + an advisory setting. + - "lpe": Specifies a Light Path Expression in the + [OSL Light Path Expressions language](https://github.com/imageworks/OpenShadingLanguage/wiki/OSL-Light-Path-Expressions) as the source for + this RenderVar. Some renderers may use extensions to + that syntax, which will necessarily be non-portable. + - "intrinsic": This setting is currently unimplemented, + but represents a future namespace for UsdRender to provide + portable baseline RenderVars, such as camera depth, that + may have varying implementations for each renderer. + """ + allowedTokens = ["raw", "primvar", "lpe", "intrinsic"] + ) +} + +class RenderProduct "RenderProduct" ( + inherits = + doc = """A UsdRenderProduct describes an image or other + file-like artifact produced by a render. A RenderProduct + combines one or more RenderVars into a file or interactive + buffer. It also provides all the controls established in + UsdRenderSettingsBase as optional overrides to whatever the + owning UsdRenderSettings prim dictates. + + Specific renderers may support additional settings, such + as a way to configure compression settings, filetype metadata, + and so forth. Such settings can be encoded using + renderer-specific API schemas applied to the product prim. 
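A rough Python sketch of how the RenderSettings / RenderVar / RenderProduct trio described above is typically authored through the pxr bindings; the stage, prim paths, and values below are invented for illustration and are not part of the vendored schema files:

    from pxr import Usd, UsdRender

    stage = Usd.Stage.CreateInMemory()

    # Global options: resolution, included purposes, (optionally) the camera rel.
    settings = UsdRender.Settings.Define(stage, '/Render/PrimarySettings')
    settings.CreateResolutionAttr((1920, 1080))
    settings.CreateIncludedPurposesAttr(['default', 'render'])

    # One RenderVar naming a channel of data the renderer should compute.
    color_var = UsdRender.Var.Define(stage, '/Render/PrimarySettings/color')
    color_var.CreateSourceNameAttr('Ci')
    color_var.CreateSourceTypeAttr('raw')

    # A RenderProduct combines RenderVars into a single output artifact.
    product = UsdRender.Product.Define(stage, '/Render/PrimarySettings/beauty')
    product.CreateProductNameAttr('beauty.exr')
    product.CreateOrderedVarsRel().AddTarget(color_var.GetPath())
    settings.CreateProductsRel().AddTarget(product.GetPath())

    # With the usdRender plugin loaded, this stage metadata points renderers
    # at the settings prim to use (the renderSettingsPrimPath field above).
    stage.SetMetadata('renderSettingsPrimPath', '/Render/PrimarySettings')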
+ """ + customData = { + string className = "Product" + dictionary schemaTokens = { + dictionary raster = { + string doc = """RenderProduct productType value that + indicates a 2D raster image of pixels.""" + } + } + } +) { + uniform token productType = "raster" ( + doc = """The type of output to produce. + The default, "raster", indicates a 2D image. + + \\note In the future, UsdRender may define additional product + types.""" + ) + token productName = "" ( + doc = """Specifies the name that the output/display driver + should give the product. This is provided as-authored to the + driver, whose responsibility it is to situate the product on a + filesystem or other storage, in the desired location.""" + ) + rel orderedVars ( + doc = """Specifies the RenderVars that should be consumed and + combined into the final product. If ordering is relevant to the + output driver, then the ordering of targets in this relationship + provides the order to use.""" + ) +} + + +class RenderPass "RenderPass" ( + inherits = + doc = """A RenderPass prim encapsulates the necessary information + to generate multipass renders. It houses properties for generating + dependencies and the necessary commands to run to generate renders, as + well as visibility controls for the scene. While RenderSettings + describes the information needed to generate images from a single + invocation of a renderer, RenderPass describes the additional information + needed to generate a time varying set of images. + + There are two consumers of RenderPass prims - a runtime executable that + generates images from usdRender prims, and pipeline specific code that + translates between usdRender prims and the pipeline's resource scheduling + software. We'll refer to the latter as 'job submission code'. + + \\anchor usdRender_renderVisibility + The objects that are relevant to the render is specified via the + renderVisibility collection (UsdCollectionAPI) and can be accessed via + GetRenderVisibilityCollectionAPI(). This collection has includeRoot set to + true so that all objects participate in the render by default. To render + only a specific set of objects, there are two options. One is to modify the + collection paths to explicitly exclude objects that don't participate in + the render, assuming it is known; the other option is to set includeRoot to + false and explicitly include the desired objects. These are complementary + approaches that may each be preferable depending on the scenario. + + The name of the prim is used as the pass's name. + """ + customData = { + string className = "Pass" + string extraIncludes = """ +#include "pxr/usd/usd/collectionAPI.h" """ + } + prepend apiSchemas = ["CollectionAPI:renderVisibility"] +) { + uniform token passType ( + doc = """A string used to categorize differently structured + or executed types of passes within a customized pipeline. + + For example, when multiple DCC's (e.g. Houdini, Katana, Nuke) + each compute and contribute different Products to a final result, + it may be clearest and most flexible to create a separate + RenderPass for each. + """ + ) + + uniform string[] command ( + doc = """The command to run in order to generate + renders for this pass. The job submission code can use + this to properly send tasks to the job scheduling software + that will generate products. + + The command can contain variables that will be substituted + appropriately during submission, as seen in the example below + with {fileName}. 
+ + For example: + command[0] = "prman" + command[1] = "-progress" + command[2] = "-pixelvariance" + command[3] = "-0.15" + command[4] = "{fileName}" # the fileName property will be substituted + """ + ) + + rel renderSource ( + doc = """The source prim to render from. If _fileName_ is not present, + the source is assumed to be a RenderSettings prim present in the current + Usd stage. If fileName is present, the source should be found in the + file there. This relationship might target a string attribute on this + or another prim that identifies the appropriate object in the external + container. + + For example, for a Usd-backed pass, this would point to a RenderSettings + prim. Houdini passes would point to a Rop. Nuke passes would point to + a write node. + """ + ) + + rel inputPasses ( + doc = """The set of other Passes that this Pass depends on + in order to be constructed properly. For example, a Pass A + may generate a texture, which is then used as an input to + Pass B. + + By default, usdRender makes some assumptions about the + relationship between this prim and the prims listed in inputPasses. + Namely, when per-frame tasks are generated from these pass prims, + usdRender will assume a one-to-one relationship between tasks + that share their frame number. Consider a pass named 'composite' + whose _inputPasses_ targets a Pass prim named 'beauty`. + By default, each frame for 'composite' will depend on the + same frame from 'beauty': + beauty.1 -> composite.1 + beauty.2 -> composite.2 + etc + + The consumer of this RenderPass graph of inputs will need to resolve + the transitive dependencies. + """ + ) + + uniform asset fileName ( + doc = """The asset that contains the rendering prims or other + information needed to render this pass. + """ + ) + + uniform bool denoise:enable = false ( + doc = """When True, this Pass pass should be denoised.""" + ) + + rel denoise:pass ( + doc = """The The UsdRenderDenoisePass prim from which to + source denoise settings. + """ + ) + + uniform bool collection:renderVisibility:includeRoot = 1 ( + customData = { + bool apiSchemaOverride = true + } + ) +} + + +class RenderDenoisePass "RenderDenoisePass" ( + inherits = + doc = """A RenderDenoisePass generates renders via a denoising process. + This may be the same renderer that a pipeline uses for UsdRender, + or may be a separate one. Notably, a RenderDenoisePass requires + another Pass to be present for it to operate. The denoising process + itself is not generative, and requires images inputs to operate. + + As denoising integration varies so widely across pipelines, all + implementation details are left to pipeline-specific prims + that inherit from RenderDenoisePass. + """ + customData = { + string className = "DenoisePass" + } +) { +} diff --git a/blender/lib/usd/usdRi/resources/generatedSchema.usda b/blender/lib/usd/usdRi/resources/generatedSchema.usda new file mode 100644 index 0000000..a251c0e --- /dev/null +++ b/blender/lib/usd/usdRi/resources/generatedSchema.usda @@ -0,0 +1,94 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class "StatementsAPI" ( + doc = """Container namespace schema for all renderman statements. + + \\note The longer term goal is for clients to go directly to primvar + or render-attribute API's, instead of using UsdRi StatementsAPI + for inherited attributes. 
Anticpating this, StatementsAPI + can smooth the way via a few environment variables: + + * USDRI_STATEMENTS_READ_OLD_ENCODING: Causes StatementsAPI to read + old-style attributes instead of primvars in the \"ri:\" + namespace. + """ +) +{ +} + +class "RiMaterialAPI" ( + doc = """ + \\deprecated Materials should use UsdShadeMaterial instead. + This schema will be removed in a future release. + + This API provides outputs that connect a material prim to prman + shaders and RIS objects.""" +) +{ + token outputs:ri:displacement ( + displayGroup = "Outputs" + ) + token outputs:ri:surface ( + displayGroup = "Outputs" + ) + token outputs:ri:volume ( + displayGroup = "Outputs" + ) +} + +class "RiSplineAPI" ( + doc = ''' + \\deprecated This API schema will be removed in a future release. + + RiSplineAPI is a general purpose API schema used to describe + a named spline stored as a set of attributes on a prim. + + It is an add-on schema that can be applied many times to a prim with + different spline names. All the attributes authored by the schema + are namespaced under "$NAME:spline:", with the name of the + spline providing a namespace for the attributes. + + The spline describes a 2D piecewise cubic curve with a position and + value for each knot. This is chosen to give straightforward artistic + control over the shape. The supported basis types are: + + - linear (UsdRiTokens->linear) + - bspline (UsdRiTokens->bspline) + - Catmull-Rom (UsdRiTokens->catmullRom) + ''' +) +{ +} + +class "RiRenderPassAPI" ( + apiSchemas = ["CollectionAPI:matte", "CollectionAPI:cameraVisibility"] + customData = { + token[] apiSchemaOverridePropertyNames = ["collection:cameraVisibility:includeRoot"] + } + doc = """ + RiRenderPassAPI is an API schema that provides a mechanism + to set certain Ri statements on each prim in a collection, + for a given RenderPass prim. + + \\anchor usdRi_cameraVisibility + The objects that are relevant to the render is specified via the + cameraVisibility collection (UsdCollectionAPI) and can be accessed via + GetCameraVisibilityCollectionAPI(). Each prim in the collection will have + ri:visible:camera set to 1. By default everything in the scene should be + visible to camera, so this collection sets includeRoot to 1. + + \\anchor usdRi_matte + The objects that are relevant to the render is specified via the + matte collection (UsdCollectionAPI) and can be accessed via + GetMatteCollectionAPI(). Each prim in the collection will have + ri:matte set to 1. By default everything in the scene should render + normally, so this collection sets includeRoot to 0. + """ +) +{ + uniform bool collection:cameraVisibility:includeRoot = 1 +} + diff --git a/blender/lib/usd/usdRi/resources/plugInfo.json b/blender/lib/usd/usdRi/resources/plugInfo.json new file mode 100644 index 0000000..cbeb020 --- /dev/null +++ b/blender/lib/usd/usdRi/resources/plugInfo.json @@ -0,0 +1,64 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. 
+{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdRiMaterialAPI": { + "alias": { + "UsdSchemaBase": "RiMaterialAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdRiRenderPassAPI": { + "alias": { + "UsdSchemaBase": "RiRenderPassAPI" + }, + "apiSchemaAutoApplyTo": [ + "RenderPass" + ], + "apiSchemaCanOnlyApplyTo": [ + "RenderPass" + ], + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdRiSplineAPI": { + "alias": { + "UsdSchemaBase": "RiSplineAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdRiStatementsAPI": { + "alias": { + "UsdSchemaBase": "StatementsAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + } + } + }, + "LibraryPath": "", + "Name": "usdRi", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdRi/resources/usdRi/schema.usda b/blender/lib/usd/usdRi/resources/usdRi/schema.usda new file mode 100644 index 0000000..f8a6fa2 --- /dev/null +++ b/blender/lib/usd/usdRi/resources/usdRi/schema.usda @@ -0,0 +1,200 @@ +#usda 1.0 +( + "This file describes the USD RenderMan Interface (UsdRi) schemata for code generation." + subLayers = [ + @usd/schema.usda@, + @usdShade/schema.usda@, + ] +) + +def "GLOBAL" ( + customData = { + string libraryName = "usdRi" + string libraryPath = "pxr/usd/usdRi" + # string libraryPrefix = "UsdRi" + # string tokensPrefix = "UsdRi" + # dictionary libraryTokens = {} + dictionary libraryTokens = { + dictionary "renderContext" = { + string value = "ri" + string doc = "UsdShadeMaterial / Hydra render context token for UsdRi" + } + dictionary spline = { + string doc = "UsdSplineAPI - Namespace for spline attributes" + } + dictionary interpolation = { + string doc = "UsdSplineAPI - Interpolation attribute name" + } + dictionary positions = { + string doc = "UsdSplineAPI - Positions attribute name" + } + dictionary values = { + string doc = "UsdSplineAPI - values attribute name" + } + dictionary linear = { + string doc = "UsdSplineAPI - Linear spline interpolation" + } + dictionary bspline = { + string doc = "UsdSplineAPI - BSpline spline interpolation" + } + dictionary "catmullRom" = { + string value = "catmull-rom" + string doc = "UsdSplineAPI - Catmull-Rom spline interpolation" + } + dictionary constant = { + string doc = "UsdSplineAPI - Constant-value spline interpolation" + } + dictionary cameraVisibility = { + string doc = """ + UsdRenderPassAPI - This token represents the collection + name to use with UsdCollectionAPI to set the camera visibility + attribute on the prims in the collection for the RenderPass. + """ + } + dictionary matte = { + string doc = """ + UsdRenderPassAPI - This token represents the collection + name to use with UsdCollectionAPI to set the matte + attribute on the prims in the collection for the RenderPass. + """ + } + } + } +) +{ +} + +class "StatementsAPI" ( + inherits = + doc = """Container namespace schema for all renderman statements. + + \\note The longer term goal is for clients to go directly to primvar + or render-attribute API's, instead of using UsdRi StatementsAPI + for inherited attributes. Anticpating this, StatementsAPI + can smooth the way via a few environment variables: + + * USDRI_STATEMENTS_READ_OLD_ENCODING: Causes StatementsAPI to read + old-style attributes instead of primvars in the "ri:" + namespace. 
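The note above steers clients toward primvars in the "ri:" namespace rather than StatementsAPI-owned attributes; a minimal Python sketch of that direction, assuming the pxr bindings and an invented prim path and attribute name:

    from pxr import Usd, UsdGeom, UsdRi, Sdf

    stage = Usd.Stage.CreateInMemory()
    prim = UsdGeom.Sphere.Define(stage, '/World/ball').GetPrim()

    # The container schema can still be applied for discoverability...
    UsdRi.StatementsAPI.Apply(prim)

    # ...but the value itself is authored as an "ri:"-namespaced primvar,
    # which is the encoding the note says clients should move toward.
    pv = UsdGeom.PrimvarsAPI(prim).CreatePrimvar(
        'ri:attributes:trace:maxdiffusedepth', Sdf.ValueTypeNames.Int)
    pv.Set(2)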
+ """ + customData = { + string extraIncludes = """ +#include "pxr/usd/usdGeom/primvarsAPI.h" +""" + } +) { +} + +### Material API common to both RSL and RIS ### + +class "RiMaterialAPI" ( + inherits = + doc = """ + \\deprecated Materials should use UsdShadeMaterial instead. + This schema will be removed in a future release. + + This API provides outputs that connect a material prim to prman + shaders and RIS objects.""" + customData = { + # We want the class name to be UsdRiMaterialAPI + string className = "MaterialAPI" + string extraIncludes = """ +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" +#include "pxr/usd/usdShade/material.h" +""" + } +) { + token outputs:ri:surface ( + displayGroup = "Outputs" + customData = { + string apiName = "surface" + } + ) + token outputs:ri:displacement ( + displayGroup = "Outputs" + customData = { + string apiName = "displacement" + } + ) + token outputs:ri:volume ( + displayGroup = "Outputs" + customData = { + string apiName = "volume" + } + ) +} + + +######################################################################## +# Lighting API + +class "RiSplineAPI" ( + inherits = + doc = """ + \\deprecated This API schema will be removed in a future release. + + RiSplineAPI is a general purpose API schema used to describe + a named spline stored as a set of attributes on a prim. + + It is an add-on schema that can be applied many times to a prim with + different spline names. All the attributes authored by the schema + are namespaced under "$NAME:spline:", with the name of the + spline providing a namespace for the attributes. + + The spline describes a 2D piecewise cubic curve with a position and + value for each knot. This is chosen to give straightforward artistic + control over the shape. The supported basis types are: + + - linear (UsdRiTokens->linear) + - bspline (UsdRiTokens->bspline) + - Catmull-Rom (UsdRiTokens->catmullRom) + """ + customData = { + string className = "SplineAPI" + } +) { +} + + +######################################################################## +# Render API + +class "RiRenderPassAPI" ( + inherits = + doc = """ + RiRenderPassAPI is an API schema that provides a mechanism + to set certain Ri statements on each prim in a collection, + for a given RenderPass prim. + + \\anchor usdRi_cameraVisibility + The objects that are relevant to the render is specified via the + cameraVisibility collection (UsdCollectionAPI) and can be accessed via + GetCameraVisibilityCollectionAPI(). Each prim in the collection will have + ri:visible:camera set to 1. By default everything in the scene should be + visible to camera, so this collection sets includeRoot to 1. + + \\anchor usdRi_matte + The objects that are relevant to the render is specified via the + matte collection (UsdCollectionAPI) and can be accessed via + GetMatteCollectionAPI(). Each prim in the collection will have + ri:matte set to 1. By default everything in the scene should render + normally, so this collection sets includeRoot to 0. 
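A short Python sketch of driving the two collections RiRenderPassAPI documents above (cameraVisibility and matte); the RenderPass path and scene paths are invented, and the auto-apply behaviour assumes the usdRi plugin is loaded:

    from pxr import Usd, UsdRender, UsdRi

    stage = Usd.Stage.CreateInMemory()
    render_pass = UsdRender.Pass.Define(stage, '/Render/Passes/beautyA')

    ri_pass = UsdRi.RenderPassAPI(render_pass.GetPrim())
    # matte has includeRoot = 0: only what is included becomes a matte object.
    ri_pass.GetMatteCollectionAPI().IncludePath('/World/charB')
    # cameraVisibility has includeRoot = 1: exclusions hide prims from camera.
    ri_pass.GetCameraVisibilityCollectionAPI().ExcludePath('/World/fx')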
+ """ + customData = { + string className = "RenderPassAPI" + token[] apiSchemaAutoApplyTo = ["RenderPass"] + token[] apiSchemaCanOnlyApplyTo = ["RenderPass"] + string extraIncludes = """ +#include "pxr/usd/usd/collectionAPI.h" """ + } + prepend apiSchemas = [ + "CollectionAPI:matte", "CollectionAPI:cameraVisibility"] +) { + + uniform bool collection:cameraVisibility:includeRoot = 1 ( + customData = { + bool apiSchemaOverride = true + } + ) +} \ No newline at end of file diff --git a/blender/lib/usd/usdRiImaging/resources/plugInfo.json b/blender/lib/usd/usdRiImaging/resources/plugInfo.json new file mode 100644 index 0000000..62233e2 --- /dev/null +++ b/blender/lib/usd/usdRiImaging/resources/plugInfo.json @@ -0,0 +1,36 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdRiImagingPxrBarnLightFilterAdapter": { + "bases": [ + "UsdImagingLightFilterAdapter" + ], + "isInternal": true, + "primTypeName": "PxrBarnLightFilter" + }, + "UsdRiImagingPxrIntMultLightFilterAdapter": { + "bases": [ + "UsdImagingLightFilterAdapter" + ], + "isInternal": true, + "primTypeName": "PxrIntMultLightFilter" + }, + "UsdRiImagingPxrRodLightFilterAdapter": { + "bases": [ + "UsdImagingLightFilterAdapter" + ], + "isInternal": true, + "primTypeName": "PxrRodLightFilter" + } + } + }, + "LibraryPath": "", + "Name": "usdRiImaging", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdShade/resources/generatedSchema.usda b/blender/lib/usd/usdShade/resources/generatedSchema.usda new file mode 100644 index 0000000..029c843 --- /dev/null +++ b/blender/lib/usd/usdShade/resources/generatedSchema.usda @@ -0,0 +1,328 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class NodeGraph "NodeGraph" ( + doc = '''A node-graph is a container for shading nodes, as well as other + node-graphs. It has a public input interface and provides a list of public + outputs. + + Node Graph Interfaces + + One of the most important functions of a node-graph is to host the "interface" + with which clients of already-built shading networks will interact. Please + see "Interface Inputs" for a detailed + explanation of what the interface provides, and how to construct and + use it, to effectively share/instance shader networks. + + Node Graph Outputs + + These behave like outputs on a shader and are typically connected to an + output on a shader inside the node-graph. + ''' +) +{ +} + +class Material "Material" ( + doc = """A Material provides a container into which multiple \"render targets\" + can add data that defines a \"shading material\" for a renderer. Typically + this consists of one or more UsdRelationship properties that target + other prims of type Shader - though a target/client is free to add + any data that is suitable. We strongly advise that all targets + adopt the convention that all properties be prefixed with a namespace + that identifies the target, e.g. \"rel ri:surface = \". + + ## Binding Materials + + In the UsdShading model, geometry expresses a binding to a single Material or + to a set of Materials partitioned by UsdGeomSubsets defined beneath the + geometry; it is legal to bind a Material at the root (or other sub-prim) of + a model, and then bind a different Material to individual gprims, but the + meaning of inheritance and \"ancestral overriding\" of Material bindings is + left to each render-target to determine. 
Since UsdGeom has no concept of + shading, we provide the API for binding and unbinding geometry on the API + schema UsdShadeMaterialBindingAPI. + + ## Material Variation + + The entire power of USD VariantSets and all the other composition + operators can leveraged when encoding shading variation. + UsdShadeMaterial provides facilities for a particular way of building + \"Material variants\" in which neither the identity of the Materials themselves + nor the geometry Material-bindings need to change - instead we vary the + targeted networks, interface values, and even parameter values within + a single variantSet. + See \"Authoring Material Variations\" + for more details. + + ## Materials Encapsulate their Networks in Namespace + + UsdShade requires that all of the shaders that \"belong\" to the Material + live under the Material in namespace. This supports powerful, easy reuse + of Materials, because it allows us to *reference* a Material from one + asset (the asset might be a library of Materials) into another asset: USD + references compose all descendant prims of the reference target into the + referencer's namespace, which means that all of the referenced Material's + shader networks will come along with the Material. When referenced in this + way, Materials can also be [instanced](http://openusd.org/docs/USD-Glossary.html#USDGlossary-Instancing), for ease of deduplication and compactness. + Finally, Material encapsulation also allows us to + \"specialize\" child materials from + parent materials. + + """ +) +{ + token outputs:displacement ( + displayGroup = "Outputs" + doc = '''Represents the universal "displacement" output terminal of a + material.''' + ) + token outputs:surface ( + displayGroup = "Outputs" + doc = '''Represents the universal "surface" output terminal of a + material.''' + ) + token outputs:volume ( + displayGroup = "Outputs" + doc = '''Represents the universal "volume" output terminal of a + material.''' + ) +} + +class Shader "Shader" ( + apiSchemas = ["NodeDefAPI"] + doc = '''Base class for all USD shaders. Shaders are the building blocks + of shading networks. While UsdShadeShader objects are not target specific, + each renderer or application target may derive its own renderer-specific + shader object types from this base, if needed. + + Objects of this class generally represent a single shading object, whether + it exists in the target renderer or not. For example, a texture, a fractal, + or a mix node. + + The UsdShadeNodeDefAPI provides attributes to uniquely identify the + type of this node. The id resolution into a renderable shader target + type of this node. The id resolution into a renderable shader target + is deferred to the consuming application. + + The purpose of representing them in Usd is two-fold: + - To represent, via "connections" the topology of the shading network + that must be reconstructed in the renderer. Facilities for authoring and + manipulating connections are encapsulated in the API schema + UsdShadeConnectableAPI. + - To present a (partial or full) interface of typed input parameters + whose values can be set and overridden in Usd, to be provided later at + render-time as parameter values to the actual render shader objects. Shader + input parameters are encapsulated in the property schema UsdShadeInput. 
+ ''' +) +{ +} + +class "NodeDefAPI" ( + doc = '''UsdShadeNodeDefAPI is an API schema that provides attributes + for a prim to select a corresponding Shader Node Definition ("Sdr Node"), + as well as to look up a runtime entry for that shader node in the + form of an SdrShaderNode. + + UsdShadeNodeDefAPI is intended to be a pre-applied API schema for any + prim type that wants to refer to the SdrRegistry for further implementation + details about the behavior of that prim. The primary use in UsdShade + itself is as UsdShadeShader, which is a basis for material shading networks + (UsdShadeMaterial), but this is intended to be used in other domains + that also use the Sdr node mechanism. + + This schema provides properties that allow a prim to identify an external + node definition, either by a direct identifier key into the SdrRegistry + (info:id), an asset to be parsed by a suitable NdrParserPlugin + (info:sourceAsset), or an inline source code that must also be parsed + (info:sourceCode); as well as a selector attribute to determine which + specifier is active (info:implementationSource). + ''' +) +{ + uniform token info:id ( + doc = """The id is an identifier for the type or purpose of the + shader. E.g.: Texture or FractalFloat. + The use of this id will depend on the render target: some will turn it + into an actual shader path, some will use it to generate shader source + code dynamically. + + \\sa SetShaderId() + """ + ) + uniform token info:implementationSource = "id" ( + allowedTokens = ["id", "sourceAsset", "sourceCode"] + doc = """Specifies the attribute that should be consulted to get the + shader's implementation or its source code. + + * If set to \"id\", the \"info:id\" attribute's value is used to + determine the shader source from the shader registry. + * If set to \"sourceAsset\", the resolved value of the \"info:sourceAsset\" + attribute corresponding to the desired implementation (or source-type) + is used to locate the shader source. A source asset file may also + specify multiple shader definitions, so there is an optional attribute + \"info:sourceAsset:subIdentifier\" whose value should be used to indicate + a particular shader definition from a source asset file. + * If set to \"sourceCode\", the value of \"info:sourceCode\" attribute + corresponding to the desired implementation (or source type) is used as + the shader source. + """ + ) +} + +class "ConnectableAPI" ( + doc = """UsdShadeConnectableAPI is an API schema that provides a common + interface for creating outputs and making connections between shading + parameters and outputs. The interface is common to all UsdShade schemas + that support Inputs and Outputs, which currently includes UsdShadeShader, + UsdShadeNodeGraph, and UsdShadeMaterial . + + One can construct a UsdShadeConnectableAPI directly from a UsdPrim, or + from objects of any of the schema classes listed above. If it seems + onerous to need to construct a secondary schema object to interact with + Inputs and Outputs, keep in mind that any function whose purpose is either + to walk material/shader networks via their connections, or to create such + networks, can typically be written entirely in terms of + UsdShadeConnectableAPI objects, without needing to care what the underlying + prim type is. + + Additionally, the most common UsdShadeConnectableAPI behaviors + (creating Inputs and Outputs, and making connections) are wrapped as + convenience methods on the prim schema classes (creation) and + UsdShadeInput and UsdShadeOutput. 
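Taken together, the Shader, NodeDefAPI, and ConnectableAPI descriptions above map to a small authoring pattern; a hedged Python sketch with invented paths and an example shader id:

    from pxr import Usd, UsdShade, Sdf

    stage = Usd.Stage.CreateInMemory()

    material = UsdShade.Material.Define(stage, '/World/Looks/plasticMat')
    shader = UsdShade.Shader.Define(stage, '/World/Looks/plasticMat/previewSurface')

    # NodeDefAPI: identify the shader definition via info:id.
    shader.SetShaderId('UsdPreviewSurface')

    # ConnectableAPI-backed helpers: typed inputs/outputs plus connections.
    shader.CreateInput('diffuseColor', Sdf.ValueTypeNames.Color3f).Set((0.1, 0.4, 0.8))
    surface_out = shader.CreateOutput('surface', Sdf.ValueTypeNames.Token)
    material.CreateSurfaceOutput().ConnectToSource(surface_out)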
+ """ +) +{ +} + +class "MaterialBindingAPI" ( + doc = """UsdShadeMaterialBindingAPI is an API schema that provides an + interface for binding materials to prims or collections of prims + (represented by UsdCollectionAPI objects). + + In the USD shading model, each renderable gprim computes a single + resolved Material that will be used to shade the gprim (exceptions, + of course, for gprims that possess UsdGeomSubsets, as each subset can be + shaded by a different Material). A gprim and each of its ancestor + prims can possess, through the MaterialBindingAPI, both a + direct binding to a Material, and any number of + collection-based bindings to Materials; each binding can be generic + or declared for a particular purpose, and given a specific binding + strength. It is the process of \"material resolution\" (see + that examines all of + these bindings, and selects the one Material that best matches the + client's needs. + + The intent of purpose is that each gprim should be able to resolve a + Material for any given purpose, which implies it can have differently bound + materials for different purposes. There are two special values of + purpose defined in UsdShade, although the API fully supports + specifying arbitrary values for it, for the sake of extensibility: +
  • UsdShadeTokens->full: to be used when the purpose of the + render is entirely to visualize the truest representation of a scene, + considering all lighting and material information, at highest fidelity.
  • UsdShadeTokens->preview: to be used when the render is in + service of a goal other than a high fidelity \"full\" render (such as scene + manipulation, modeling, or realtime playback). Latency and speed are + generally of greater concern for preview renders, therefore preview + materials are generally designed to be \"lighterweight\" compared to full + materials.
+ A binding can also have no specific purpose at all, in which + case, it is considered to be the fallback or all-purpose binding (denoted + by the empty-valued token UsdShadeTokens->allPurpose). + + The purpose of a material binding is encoded in the name of the + binding relationship. +
  • In the case of a direct binding, the allPurpose binding is represented by the relationship named \"material:binding\". Special-purpose direct bindings are represented by relationships named \"material:binding:purpose\". A direct binding relationship must have a single target path that points to a UsdShadeMaterial.
  • In the case of a collection-based binding, the allPurpose binding is represented by a relationship named \"material:binding:collection:bindingName\", where bindingName establishes an identity for the binding that is unique on the prim. Attempting to establish two collection bindings of the same name on the same prim will result in the first binding simply being overridden. A special-purpose collection-based binding is represented by a relationship named \"material:binding:collection:purpose:bindingName\". A collection-based binding relationship must have exactly two targets, one of which should be a collection-path (see UsdCollectionAPI::GetCollectionPath()) and the other should point to a UsdShadeMaterial. In the future, we may allow a single collection binding to target multiple collections, if we can establish a reasonable round-tripping pattern for applications that only allow a single collection to be associated with each Material.
+ + Note: Both bindingName and purpose must be + non-namespaced tokens. This allows us to know the role of a binding + relationship simply from the number of tokens in it. +
  • Two tokens: the fallback, \"all purpose\", direct binding, + material:binding
  • Three tokens: a purpose-restricted, direct, fallback binding, + e.g. material:binding:preview
  • Four tokens: an all-purpose, collection-based binding, e.g. + material:binding:collection:metalBits
  • Five tokens: a purpose-restricted, collection-based binding, + e.g. material:binding:collection:full:metalBits
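The relationship names in the list above are what the UsdShade binding helpers author for you; a minimal Python sketch, assuming an existing stage, prim, and material at the paths shown:

    from pxr import Usd, UsdShade

    stage = Usd.Stage.Open('scene.usda')                 # assumed existing file
    prim = stage.GetPrimAtPath('/World/ball')            # assumed existing prim
    mat = UsdShade.Material.Get(stage, '/World/Looks/plasticMat')

    binding = UsdShade.MaterialBindingAPI.Apply(prim)
    # All-purpose direct binding -> rel "material:binding" (two tokens).
    binding.Bind(mat)
    # Purpose-restricted direct binding -> "material:binding:preview" (three tokens).
    binding.Bind(mat, materialPurpose=UsdShade.Tokens.preview)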
+ + A binding-strength value is used to specify whether a binding + authored on a prim should be weaker or stronger than bindings that appear + lower in namespace. We encode the binding strength with as token-valued + metadata 'bindMaterialAs' for future flexibility, even though for + now, there are only two possible values: + UsdShadeTokens->weakerThanDescendants and + UsdShadeTokens->strongerThanDescendants. When binding-strength is + not authored (i.e. empty) on a binding-relationship, the default behavior + matches UsdShadeTokens->weakerThanDescendants. + + \\note If a material binding relationship is a built-in property defined as + part of a typed prim's schema, a fallback value should not be provided for + it. This is because the \"material resolution\" algorithm only conisders + authored properties. + """ +) +{ +} + +class "CoordSysAPI" ( + doc = '''UsdShadeCoordSysAPI provides a way to designate, name, + and discover coordinate systems. + + Coordinate systems are implicitly established by UsdGeomXformable + prims, using their local space. That coordinate system may be + bound (i.e., named) from another prim. The binding is encoded + as a single-target relationship. + Coordinate system bindings apply to descendants of the prim + where the binding is expressed, but names may be re-bound by + descendant prims. + + CoordSysAPI is a multi-apply API schema, where instance names + signify the named coordinate systems. The instance names are + used with the "coordSys:" namespace to determine the binding + to the UsdGeomXformable prim. + + Named coordinate systems are useful in shading (and other) workflows. + An example is projection paint, which projects a texture + from a certain view (the paint coordinate system), encoded as + (e.g.) "rel coordSys:paint:binding". Using the paint coordinate frame + avoids the need to assign a UV set to the object, and can be a + concise way to project paint across a collection of objects with + a single shared paint coordinate system. + ''' +) +{ + rel coordSys:__INSTANCE_NAME__:binding ( + displayName = "Bound Coordinate System" + doc = "Prim binding expressing the appropriate coordinate systems." + ) +} + diff --git a/blender/lib/usd/usdShade/resources/plugInfo.json b/blender/lib/usd/usdShade/resources/plugInfo.json new file mode 100644 index 0000000..3990591 --- /dev/null +++ b/blender/lib/usd/usdShade/resources/plugInfo.json @@ -0,0 +1,138 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "bindMaterialAs": { + "appliesTo": [ + "relationships" + ], + "displayGroup": "Shading", + "documentation": "Metadata authored on collection-based material binding relationship to indicate the strength of the binding relative to bindings authored on descendant prims.", + "type": "token" + }, + "connectability": { + "appliesTo": [ + "attributes" + ], + "default": "full", + "displayGroup": "Shading", + "documentation": "Metadata authored on UsdShadeInput's to specify what they can be connected to. Can be either \"full\" or \"interfaceOnly\". \"full\" implies that the input can be connected to any other input or output. 
\"interfaceOnly\" implies that the input can only connect to a NodeGraph Input (which represents an interface override, not a render-time dataflow connection), or another Input whose connectability is also \"interfaceOnly\".", + "type": "token" + }, + "outputName": { + "appliesTo": [ + "relationships" + ], + "displayGroup": "deprecated", + "type": "token" + }, + "renderType": { + "appliesTo": [ + "properties" + ], + "displayGroup": "Rendering", + "type": "token" + }, + "sdrMetadata": { + "appliesTo": [ + "prims", + "attributes" + ], + "displayGroup": "Shading", + "type": "dictionary" + } + }, + "Types": { + "UsdShadeConnectableAPI": { + "alias": { + "UsdSchemaBase": "ConnectableAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "nonAppliedAPI" + }, + "UsdShadeCoordSysAPI": { + "alias": { + "UsdSchemaBase": "CoordSysAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "multipleApplyAPI" + }, + "UsdShadeMaterial": { + "alias": { + "UsdSchemaBase": "Material" + }, + "autoGenerated": true, + "bases": [ + "UsdShadeNodeGraph" + ], + "providesUsdShadeConnectableAPIBehavior": true, + "schemaKind": "concreteTyped" + }, + "UsdShadeMaterialBindingAPI": { + "alias": { + "UsdSchemaBase": "MaterialBindingAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdShadeNodeDefAPI": { + "alias": { + "UsdSchemaBase": "NodeDefAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdShadeNodeGraph": { + "alias": { + "UsdSchemaBase": "NodeGraph" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "providesUsdShadeConnectableAPIBehavior": true, + "schemaKind": "concreteTyped" + }, + "UsdShadeShader": { + "alias": { + "UsdSchemaBase": "Shader" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "providesUsdShadeConnectableAPIBehavior": true, + "schemaKind": "concreteTyped" + }, + "UsdShadeShaderDefParserPlugin": { + "bases": [ + "NdrParserPlugin" + ], + "displayName": "USD-based shader definition parser plugin" + } + } + }, + "LibraryPath": "", + "Name": "usdShade", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdShade/resources/usdShade/schema.usda b/blender/lib/usd/usdShade/resources/usdShade/schema.usda new file mode 100644 index 0000000..b8fd049 --- /dev/null +++ b/blender/lib/usd/usdShade/resources/usdShade/schema.usda @@ -0,0 +1,594 @@ +#usda 1.0 +( + "This file describes the USD Shader schemata for code generation." + subLayers = [ + @usd/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdShade" + string libraryPath = "pxr/usd/usdShade" + dictionary libraryTokens = { + dictionary materialBind = { + string doc = """The name of the GeomSubset family used to + identify face subsets defined for the purpose of binding + materials to facesets. + """ + } + dictionary full = { + string doc= """Possible value for 'connectability' metadata on + a UsdShadeInput. When connectability of an input is set to + "full", it implies that it can be connected to any input or + output. + """ + } + dictionary interfaceOnly = { + string doc= """Possible value for 'connectability' metadata on + a UsdShadeInput. It implies that the input can only connect to + a NodeGraph Input (which represents an interface override, not + a render-time dataflow connection), or another Input whose + connectability is also 'interfaceOnly'. 
+ """ + } + dictionary outputs = { + string value = "outputs:" + string doc = """The prefix on shading attributes + denoting an output. + """ + } + dictionary inputs = { + string value = "inputs:" + string doc = """The prefix on shading attributes + denoting an input. + """ + } + } + } +) +{ +} + +class NodeGraph "NodeGraph" ( + inherits = + doc = """A node-graph is a container for shading nodes, as well as other + node-graphs. It has a public input interface and provides a list of public + outputs. + + Node Graph Interfaces + + One of the most important functions of a node-graph is to host the "interface" + with which clients of already-built shading networks will interact. Please + see \\ref UsdShadeNodeGraph_Interfaces "Interface Inputs" for a detailed + explanation of what the interface provides, and how to construct and + use it, to effectively share/instance shader networks. + + Node Graph Outputs + + These behave like outputs on a shader and are typically connected to an + output on a shader inside the node-graph. + """ + customData = { + dictionary extraPlugInfo = { + bool providesUsdShadeConnectableAPIBehavior = 1 + } + string extraIncludes = ''' +#include +#include "pxr/usd/usd/editTarget.h" +#include "pxr/usd/usd/relationship.h" +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" +#include "pxr/usd/usdShade/shader.h" +#include "pxr/usd/usdShade/connectableAPIBehavior.h"''' + } +) { + +} + + +class Material "Material" ( + inherits = + doc = """A Material provides a container into which multiple "render targets" + can add data that defines a "shading material" for a renderer. Typically + this consists of one or more UsdRelationship properties that target + other prims of type \\em Shader - though a target/client is free to add + any data that is suitable. We strongly advise that all targets + adopt the convention that all properties be prefixed with a namespace + that identifies the target, e.g. "rel ri:surface = ". + + ## Binding Materials + + In the UsdShading model, geometry expresses a binding to a single Material or + to a set of Materials partitioned by UsdGeomSubsets defined beneath the + geometry; it is legal to bind a Material at the root (or other sub-prim) of + a model, and then bind a different Material to individual gprims, but the + meaning of inheritance and "ancestral overriding" of Material bindings is + left to each render-target to determine. Since UsdGeom has no concept of + shading, we provide the API for binding and unbinding geometry on the API + schema UsdShadeMaterialBindingAPI. + + ## Material Variation + + The entire power of USD VariantSets and all the other composition + operators can leveraged when encoding shading variation. + UsdShadeMaterial provides facilities for a particular way of building + "Material variants" in which neither the identity of the Materials themselves + nor the geometry Material-bindings need to change - instead we vary the + targeted networks, interface values, and even parameter values within + a single variantSet. + See \\ref UsdShadeMaterial_Variations "Authoring Material Variations" + for more details. + + ## Materials Encapsulate their Networks in Namespace + + UsdShade requires that all of the shaders that "belong" to the Material + live under the Material in namespace. 
This supports powerful, easy reuse + of Materials, because it allows us to *reference* a Material from one + asset (the asset might be a library of Materials) into another asset: USD + references compose all descendant prims of the reference target into the + referencer's namespace, which means that all of the referenced Material's + shader networks will come along with the Material. When referenced in this + way, Materials can also be [instanced](http://openusd.org/docs/USD-Glossary.html#USDGlossary-Instancing), for ease of deduplication and compactness. + Finally, Material encapsulation also allows us to + \\ref UsdShadeMaterial_BaseMaterial "specialize" child materials from + parent materials. + + """ + customData = { + dictionary extraPlugInfo = { + bool providesUsdShadeConnectableAPIBehavior = 1 + } + string extraIncludes = ''' +#include "pxr/usd/usd/variantSets.h" +#include "pxr/usd/usdGeom/subset.h" +#include "pxr/usd/usdShade/connectableAPI.h"''' + dictionary schemaTokens = { + dictionary materialVariant = { + string doc = """The variant name of material variation + described on a UsdShadeMaterial. + """ + } + dictionary surface = { + string doc = """Describes the surface output + terminal on a UsdShadeMaterial. It is used to define the + terminal UsdShadeShader describing the surface of a + UsdShadeMaterial. + """ + } + dictionary displacement = { + string doc = """Describes the displacement output + terminal on a UsdShadeMaterial. It is used to define the + terminal UsdShadeShader describing the displacement of a + UsdShadeMaterial. + """ + } + dictionary volume = { + string doc = """Describes the volume output + terminal on a UsdShadeMaterial. It is used to define the + terminal UsdShadeShader describing the volume of a + UsdShadeMaterial. + """ + } + dictionary universalRenderContext = { + string value = "" + string doc = """Possible value for the "renderContext" parameter + in \\ref UsdShadeMaterial_Outputs API. Represents the universal + renderContext. An output with a universal renderContext is + applicable to all possible rendering contexts. + """ + } + } + } +) +{ + token outputs:surface ( + doc = """Represents the universal "surface" output terminal of a + material.""" + displayGroup = "Outputs" + customData = { + string apiName = "surface" + } + ) + token outputs:displacement ( + doc = """Represents the universal "displacement" output terminal of a + material.""" + displayGroup = "Outputs" + customData = { + string apiName = "displacement" + } + ) + token outputs:volume ( + doc = """Represents the universal "volume" output terminal of a + material.""" + displayGroup = "Outputs" + customData = { + string apiName = "volume" + } + ) +} + +class Shader "Shader" ( + inherits = + prepend apiSchemas = ["NodeDefAPI"] + doc = """Base class for all USD shaders. Shaders are the building blocks + of shading networks. While UsdShadeShader objects are not target specific, + each renderer or application target may derive its own renderer-specific + shader object types from this base, if needed. + + Objects of this class generally represent a single shading object, whether + it exists in the target renderer or not. For example, a texture, a fractal, + or a mix node. + + The UsdShadeNodeDefAPI provides attributes to uniquely identify the + type of this node. The id resolution into a renderable shader target + type of this node. The id resolution into a renderable shader target + is deferred to the consuming application. 
+ + The purpose of representing them in Usd is two-fold: + \\li To represent, via "connections" the topology of the shading network + that must be reconstructed in the renderer. Facilities for authoring and + manipulating connections are encapsulated in the API schema + UsdShadeConnectableAPI. + \\li To present a (partial or full) interface of typed input parameters + whose values can be set and overridden in Usd, to be provided later at + render-time as parameter values to the actual render shader objects. Shader + input parameters are encapsulated in the property schema UsdShadeInput. + """ + customData = { + dictionary extraPlugInfo = { + bool providesUsdShadeConnectableAPIBehavior = 1 + } + string extraIncludes = ''' +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" +#include "pxr/usd/usdShade/tokens.h" +#include "pxr/usd/ndr/declare.h" +#include "pxr/usd/sdr/shaderNode.h"''' + } +) { +} + +class "NodeDefAPI" +( + inherits = + doc = """UsdShadeNodeDefAPI is an API schema that provides attributes + for a prim to select a corresponding Shader Node Definition ("Sdr Node"), + as well as to look up a runtime entry for that shader node in the + form of an SdrShaderNode. + + UsdShadeNodeDefAPI is intended to be a pre-applied API schema for any + prim type that wants to refer to the SdrRegistry for further implementation + details about the behavior of that prim. The primary use in UsdShade + itself is as UsdShadeShader, which is a basis for material shading networks + (UsdShadeMaterial), but this is intended to be used in other domains + that also use the Sdr node mechanism. + + This schema provides properties that allow a prim to identify an external + node definition, either by a direct identifier key into the SdrRegistry + (info:id), an asset to be parsed by a suitable NdrParserPlugin + (info:sourceAsset), or an inline source code that must also be parsed + (info:sourceCode); as well as a selector attribute to determine which + specifier is active (info:implementationSource). + """ + customData = { + token apiSchemaType = "singleApply" + string extraIncludes = """ +#include "pxr/usd/ndr/declare.h" +#include "pxr/usd/sdr/shaderNode.h" + """ + dictionary schemaTokens = { + dictionary universalSourceType = { + string value = "" + string doc = """Possible value for the "sourceType" parameter + in \\ref UsdShadeNodeDefAPI_ImplementationSource API. Represents + the universal or fallback source type. + """ + } + dictionary sdrMetadata = { + string doc = """Dictionary valued metadata key authored on + Shader prims with implementationSource value of sourceAsset or + sourceCode to pass along metadata to the shader parser or + compiler. It is also used to author metadata on shader + properties in a UsdShade-based shader definition file. + """ + } + dictionary subIdentifier = { + string doc = """This identifier is used in conjunction with a + specific source asset to indicate a particular definition within + the source asset, if the source asset specifies more than one + shader node definition. + """ + } + } + } +) +{ + uniform token info:implementationSource = "id" ( + allowedTokens = ["id", "sourceAsset", "sourceCode"] + doc = """Specifies the attribute that should be consulted to get the + shader's implementation or its source code. + + * If set to "id", the "info:id" attribute's value is used to + determine the shader source from the shader registry. 
+ * If set to "sourceAsset", the resolved value of the "info:sourceAsset" + attribute corresponding to the desired implementation (or source-type) + is used to locate the shader source. A source asset file may also + specify multiple shader definitions, so there is an optional attribute + "info:sourceAsset:subIdentifier" whose value should be used to indicate + a particular shader definition from a source asset file. + * If set to "sourceCode", the value of "info:sourceCode" attribute + corresponding to the desired implementation (or source type) is used as + the shader source. + """ + customData = { + string apiName = "implementationSource" + } + ) + + uniform token info:id ( + doc = """The id is an identifier for the type or purpose of the + shader. E.g.: Texture or FractalFloat. + The use of this id will depend on the render target: some will turn it + into an actual shader path, some will use it to generate shader source + code dynamically. + + \\sa SetShaderId() + """ + customData = { + string apiName = "id" + } + ) +} + +class "ConnectableAPI" +( + inherits = + doc = """UsdShadeConnectableAPI is an API schema that provides a common + interface for creating outputs and making connections between shading + parameters and outputs. The interface is common to all UsdShade schemas + that support Inputs and Outputs, which currently includes UsdShadeShader, + UsdShadeNodeGraph, and UsdShadeMaterial . + + One can construct a UsdShadeConnectableAPI directly from a UsdPrim, or + from objects of any of the schema classes listed above. If it seems + onerous to need to construct a secondary schema object to interact with + Inputs and Outputs, keep in mind that any function whose purpose is either + to walk material/shader networks via their connections, or to create such + networks, can typically be written entirely in terms of + UsdShadeConnectableAPI objects, without needing to care what the underlying + prim type is. + + Additionally, the most common UsdShadeConnectableAPI behaviors + (creating Inputs and Outputs, and making connections) are wrapped as + convenience methods on the prim schema classes (creation) and + UsdShadeInput and UsdShadeOutput. + """ + customData = { + token apiSchemaType = "nonApplied" + string extraIncludes = ''' +#include "pxr/usd/usd/typed.h" +#include "pxr/usd/usdShade/input.h" +#include "pxr/usd/usdShade/output.h" +#include "pxr/usd/usdShade/tokens.h" +#include "pxr/usd/usdShade/types.h"''' + } +) +{ +} + +class "MaterialBindingAPI" +( + inherits = + doc = """UsdShadeMaterialBindingAPI is an API schema that provides an + interface for binding materials to prims or collections of prims + (represented by UsdCollectionAPI objects). + + In the USD shading model, each renderable gprim computes a single + resolved Material that will be used to shade the gprim (exceptions, + of course, for gprims that possess UsdGeomSubsets, as each subset can be + shaded by a different Material). A gprim and each of its ancestor + prims can possess, through the MaterialBindingAPI, both a + direct binding to a Material, and any number of + collection-based bindings to Materials; each binding can be generic + or declared for a particular purpose, and given a specific binding + strength. It is the process of "material resolution" (see + \\ref UsdShadeMaterialBindingAPI_MaterialResolution) that examines all of + these bindings, and selects the one Material that best matches the + client's needs. 
+ + The intent of purpose is that each gprim should be able to resolve a + Material for any given purpose, which implies it can have differently bound + materials for different purposes. There are two special values of + purpose defined in UsdShade, although the API fully supports + specifying arbitrary values for it, for the sake of extensibility: +
  • UsdShadeTokens->full: to be used when the purpose of the + render is entirely to visualize the truest representation of a scene, + considering all lighting and material information, at highest fidelity.
  • UsdShadeTokens->preview: to be used when the render is in + service of a goal other than a high fidelity "full" render (such as scene + manipulation, modeling, or realtime playback). Latency and speed are + generally of greater concern for preview renders, therefore preview + materials are generally designed to be "lighterweight" compared to full + materials.
+ A binding can also have no specific purpose at all, in which + case, it is considered to be the fallback or all-purpose binding (denoted + by the empty-valued token UsdShadeTokens->allPurpose). + + The purpose of a material binding is encoded in the name of the + binding relationship. +
  • In the case of a direct binding, the allPurpose binding is represented by the relationship named "material:binding". Special-purpose direct bindings are represented by relationships named "material:binding:purpose". A direct binding relationship must have a single target path that points to a UsdShadeMaterial.
  • In the case of a collection-based binding, the allPurpose binding is represented by a relationship named "material:binding:collection:bindingName", where bindingName establishes an identity for the binding that is unique on the prim. Attempting to establish two collection bindings of the same name on the same prim will result in the first binding simply being overridden. A special-purpose collection-based binding is represented by a relationship named "material:binding:collection:purpose:bindingName". A collection-based binding relationship must have exactly two targets, one of which should be a collection-path (see \\ref UsdCollectionAPI::GetCollectionPath()) and the other should point to a UsdShadeMaterial. In the future, we may allow a single collection binding to target multiple collections, if we can establish a reasonable round-tripping pattern for applications that only allow a single collection to be associated with each Material.
+ + Note: Both bindingName and purpose must be + non-namespaced tokens. This allows us to know the role of a binding + relationship simply from the number of tokens in it. +
  • Two tokens: the fallback, "all purpose", direct binding, + material:binding
  • Three tokens: a purpose-restricted, direct binding, + e.g. material:binding:preview
  • Four tokens: an all-purpose, collection-based binding, e.g. + material:binding:collection:metalBits
  • Five tokens: a purpose-restricted, collection-based binding, + e.g. material:binding:collection:full:metalBits
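To make the naming rules above concrete, here is a minimal Python sketch (assuming the pxr Python bindings bundled with this Blender build are importable; the stage, prim, and material paths are hypothetical). It authors an all-purpose direct binding and a preview-purpose direct binding, which show up as the relationships material:binding and material:binding:preview respectively.

from pxr import Usd, UsdGeom, UsdShade

stage = Usd.Stage.CreateInMemory()
mesh = UsdGeom.Mesh.Define(stage, "/Model/Geom/Body")             # hypothetical gprim
fullMat = UsdShade.Material.Define(stage, "/Model/Looks/Full")    # hypothetical materials
previewMat = UsdShade.Material.Define(stage, "/Model/Looks/Preview")

binding = UsdShade.MaterialBindingAPI.Apply(mesh.GetPrim())
binding.Bind(fullMat)                             # all-purpose direct binding: material:binding
binding.Bind(previewMat,
             UsdShade.Tokens.fallbackStrength,    # author the binding strength sparsely
             UsdShade.Tokens.preview)             # purpose-restricted: material:binding:preview

print(stage.GetRootLayer().ExportToString())      # inspect the authored relationships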
+ + A binding-strength value is used to specify whether a binding + authored on a prim should be weaker or stronger than bindings that appear + lower in namespace. We encode the binding strength with as token-valued + metadata 'bindMaterialAs' for future flexibility, even though for + now, there are only two possible values: + UsdShadeTokens->weakerThanDescendants and + UsdShadeTokens->strongerThanDescendants. When binding-strength is + not authored (i.e. empty) on a binding-relationship, the default behavior + matches UsdShadeTokens->weakerThanDescendants. + + \\note If a material binding relationship is a built-in property defined as + part of a typed prim's schema, a fallback value should not be provided for + it. This is because the "material resolution" algorithm only conisders + authored properties. + """ + customData = { + string extraIncludes = """ +#include "pxr/usd/usd/collectionAPI.h" +#include "pxr/usd/usdGeom/subset.h" +#include "pxr/usd/usdShade/material.h" +#include """ + dictionary schemaTokens = { + dictionary materialBinding = { + string value = "material:binding" + string doc = """ + The relationship name on non-shading prims to + denote a binding to a UsdShadeMaterial. + """ + } + dictionary materialBindingCollection = { + string value = "material:binding:collection" + string doc = """ + The relationship name on non-shading prims to + denote a collection-based binding to a UsdShadeMaterial. + """ + } + dictionary allPurpose = { + string value = "" + string doc = """Possible value for the 'materialPurpose' parameter + in the various methods available in UsdShadeMaterialBindingAPI. + Its value is empty and its purpose is to represent a general + purpose material-binding that applies in the absence of a + specific-purpose binding. + """ + } + dictionary preview = { + string doc = """Possible value for the 'materialPurpose' + parameter in UsdShadeMaterialBindingAPI, to be used when the + render is in service of a goal other than a high fidelity "full" + render (such as scene manipulation, modeling, or realtime + playback). Latency and speed are generally of greater concern + for preview renders, therefore preview materials are generally + designed to be "lighterweight" compared to full materials. + """ + } + + dictionary full = { + string doc= """Possible value for the 'materialPurpose' + parameter in UsdShadeMaterialBindingAPI, to be used when the + purpose of the render is entirely about visualizing the truest + representation of a scene, considering all lighting and material + information, at highest fidelity. + """ + } + dictionary bindMaterialAs = { + string doc = """Token valued metadata key authored on a material + binding relationship to indicate the strength of the binding + relative to bindings authored on descendants. + """ + } + dictionary weakerThanDescendants = { + string doc = """Possible value for 'bindMaterialAs' metadata on the + collection-based material binding relationship. Indicates + that the binding represented by the relationship is weaker than + any bindings authored on the descendants.""" + } + dictionary strongerThanDescendants = { + string doc = """Possible value for 'bindMaterialAs' metadata on the + collection-based material binding relationship. Indicates + that the binding represented by the relationship is stronger than + any bindings authored on the descendants.""" + } + dictionary fallbackStrength = { + string doc = """Sentinal value to be used for 'bindMaterialAs' + metadata's default value. 
Clients should pass this in for the + 'bindingStrength' argument to UsdShadeMaterialBindingAPI::Bind(), + if they want to author the default value (weakerThanDescendants) + sparsely. The value "fallbackStrength" never gets authored + into scene description.""" + } + } + } +) +{ + +} + +class "CoordSysAPI" +( + inherits = + doc = """UsdShadeCoordSysAPI provides a way to designate, name, + and discover coordinate systems. + + Coordinate systems are implicitly established by UsdGeomXformable + prims, using their local space. That coordinate system may be + bound (i.e., named) from another prim. The binding is encoded + as a single-target relationship. + Coordinate system bindings apply to descendants of the prim + where the binding is expressed, but names may be re-bound by + descendant prims. + + CoordSysAPI is a multi-apply API schema, where instance names + signify the named coordinate systems. The instance names are + used with the "coordSys:" namespace to determine the binding + to the UsdGeomXformable prim. + + Named coordinate systems are useful in shading (and other) workflows. + An example is projection paint, which projects a texture + from a certain view (the paint coordinate system), encoded as + (e.g.) "rel coordSys:paint:binding". Using the paint coordinate frame + avoids the need to assign a UV set to the object, and can be a + concise way to project paint across a collection of objects with + a single shared paint coordinate system. + """ + customData = { + token apiSchemaType = "multipleApply" + token propertyNamespacePrefix = "coordSys" + string extraIncludes = ''' +#include "pxr/usd/usdGeom/xformable.h"''' + } +) +{ + rel binding ( + displayName = "Bound Coordinate System" + doc = "Prim binding expressing the appropriate coordinate systems." + ) +} diff --git a/blender/lib/usd/usdSkel/resources/generatedSchema.usda b/blender/lib/usd/usdSkel/resources/generatedSchema.usda new file mode 100644 index 0000000..96538a6 --- /dev/null +++ b/blender/lib/usd/usdSkel/resources/generatedSchema.usda @@ -0,0 +1,367 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." +) + +class SkelRoot "SkelRoot" ( + doc = '''Boundable prim type used to identify a scope beneath which + skeletally-posed primitives are defined. + + A SkelRoot must be defined at or above a skinned primitive for any skinning + behaviors in UsdSkel. + + See the extended "Skel Root Schema" documentation for + more information.''' +) +{ + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Skeleton "Skeleton" ( + doc = '''Describes a skeleton. + + See the extended "Skeleton Schema" documentation for + more information. + ''' +) +{ + uniform matrix4d[] bindTransforms ( + doc = """Specifies the bind-pose transforms of each joint in + **world space**, in the ordering imposed by *joints*.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + uniform token[] jointNames ( + doc = """If authored, provides a unique name per joint. 
This may be + optionally set to provide better names when translating to DCC apps + that require unique joint names.""" + ) + uniform token[] joints ( + doc = """An array of path tokens identifying the set of joints that make + up the skeleton, and their order. Each token in the array must be valid + when parsed as an SdfPath. The parent-child relationships of the + corresponding paths determine the parent-child relationships of each + joint. It is not required that the name at the end of each path be + unique, but rather only that the paths themselves be unique.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + uniform matrix4d[] restTransforms ( + doc = """Specifies the rest-pose transforms of each joint in + **local space**, in the ordering imposed by *joints*. This provides + fallback values for joint transforms when a Skeleton either has no + bound animation source, or when that animation source only contains + animation for a subset of a Skeleton's joints.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class SkelAnimation "SkelAnimation" ( + doc = '''Describes a skel animation, where joint animation is stored in a + vectorized form. + + See the extended "Skel Animation" + documentation for more information. 
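A short, hedged Python sketch of authoring this vectorized joint data (assuming the bundled pxr Python bindings are available; the prim path and joint names are hypothetical):

from pxr import Usd, UsdSkel, Gf, Vt

stage = Usd.Stage.CreateInMemory()
anim = UsdSkel.Animation.Define(stage, "/Char/Anim")              # hypothetical path
anim.CreateJointsAttr().Set(["hips", "hips/spine"])               # hypothetical joint order
# One time sample; array lengths match the number of joints above.
anim.CreateTranslationsAttr().Set(
    Vt.Vec3fArray([Gf.Vec3f(0, 1, 0), Gf.Vec3f(0, 0.5, 0)]), Usd.TimeCode(1))
anim.CreateRotationsAttr().Set(
    Vt.QuatfArray([Gf.Quatf(1, 0, 0, 0)] * 2), Usd.TimeCode(1))
anim.CreateScalesAttr().Set(
    Vt.Vec3hArray([Gf.Vec3h(1, 1, 1)] * 2), Usd.TimeCode(1))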
+ ''' +) +{ + uniform token[] blendShapes ( + doc = """Array of tokens identifying which blend shapes this + animation's data applies to. The tokens for blendShapes correspond to + the tokens set in the *skel:blendShapes* binding property of the + UsdSkelBindingAPI.""" + ) + float[] blendShapeWeights ( + doc = """Array of weight values for each blend shape. Each weight value + is associated with the corresponding blend shape identified within the + *blendShapes* token array, and therefore must have the same length as + *blendShapes.""" + ) + uniform token[] joints ( + doc = """Array of tokens identifying which joints this animation's + data applies to. The tokens for joints correspond to the tokens of + Skeleton primitives. The order of the joints as listed here may + vary from the order of joints on the Skeleton itself.""" + ) + quatf[] rotations ( + doc = """Joint-local unit quaternion rotations of all affected joints, + in 32-bit precision. Array length should match the size of the + *joints* attribute.""" + ) + half3[] scales ( + doc = """Joint-local scales of all affected joints, in + 16 bit precision. Array length should match the size of the *joints* + attribute.""" + ) + float3[] translations ( + doc = """Joint-local translations of all affected joints. Array length + should match the size of the *joints* attribute.""" + ) +} + +class PackedJointAnimation "PackedJointAnimation" ( + doc = "Deprecated. Please use SkelAnimation instead." +) +{ + uniform token[] blendShapes ( + doc = """Array of tokens identifying which blend shapes this + animation's data applies to. The tokens for blendShapes correspond to + the tokens set in the *skel:blendShapes* binding property of the + UsdSkelBindingAPI.""" + ) + float[] blendShapeWeights ( + doc = """Array of weight values for each blend shape. Each weight value + is associated with the corresponding blend shape identified within the + *blendShapes* token array, and therefore must have the same length as + *blendShapes.""" + ) + uniform token[] joints ( + doc = """Array of tokens identifying which joints this animation's + data applies to. The tokens for joints correspond to the tokens of + Skeleton primitives. The order of the joints as listed here may + vary from the order of joints on the Skeleton itself.""" + ) + quatf[] rotations ( + doc = """Joint-local unit quaternion rotations of all affected joints, + in 32-bit precision. Array length should match the size of the + *joints* attribute.""" + ) + half3[] scales ( + doc = """Joint-local scales of all affected joints, in + 16 bit precision. Array length should match the size of the *joints* + attribute.""" + ) + float3[] translations ( + doc = """Joint-local translations of all affected joints. Array length + should match the size of the *joints* attribute.""" + ) +} + +class "SkelBindingAPI" ( + doc = '''Provides API for authoring and extracting all the skinning-related + data that lives in the "geometry hierarchy" of prims and models that want + to be skeletally deformed. + + See the extended "UsdSkelBindingAPI schema" + documentation for more about bindings and how they apply in a scene graph. + ''' +) +{ + matrix4d primvars:skel:geomBindTransform ( + doc = '''Encodes the bind-time world space transforms of the prim. + If the transform is identical for a group of gprims that share a common + ancestor, the transform may be authored on the ancestor, to "inherit" + down to all the leaf gprims. 
If this transform is unset, an identity + transform is used instead.''' + ) + int[] primvars:skel:jointIndices ( + doc = """Indices into the *joints* attribute of the closest + (in namespace) bound Skeleton that affect each point of a PointBased + gprim. The primvar can have either *constant* or *vertex* interpolation. + This primvar's *elementSize* will determine how many joint influences + apply to each point. Indices must point be valid. Null influences should + be defined by setting values in jointWeights to zero. + See UsdGeomPrimvar for more information on interpolation and + elementSize.""" + ) + float[] primvars:skel:jointWeights ( + doc = """Weights for the joints that affect each point of a PointBased + gprim. The primvar can have either *constant* or *vertex* interpolation. + This primvar's *elementSize* will determine how many joints influences + apply to each point. The length, interpolation, and elementSize of + *jointWeights* must match that of *jointIndices*. See UsdGeomPrimvar + for more information on interpolation and elementSize.""" + ) + uniform token primvars:skel:skinningMethod = "classicLinear" ( + allowedTokens = ["classicLinear", "dualQuaternion"] + doc = "The skinningMethod specifies the skinning method for the prim." + ) + rel skel:animationSource ( + doc = """Animation source to be bound to Skeleton primitives at or + beneath the location at which this property is defined. + """ + ) + uniform token[] skel:blendShapes ( + doc = """An array of tokens defining the order onto which blend shape + weights from an animation source map onto the *skel:blendShapeTargets* + rel of a binding site. If authored, the number of elements must be equal + to the number of targets in the _blendShapeTargets_ rel. This property + is not inherited hierarchically, and is expected to be authored directly + on the skinnable primitive to which the blend shapes apply.""" + ) + rel skel:blendShapeTargets ( + doc = """Ordered list of all target blend shapes. This property is not + inherited hierarchically, and is expected to be authored directly on + the skinnable primitive to which the the blend shapes apply.""" + ) + uniform token[] skel:joints ( + doc = """An (optional) array of tokens defining the list of + joints to which jointIndices apply. If not defined, jointIndices applies + to the ordered list of joints defined in the bound Skeleton's *joints* + attribute. If undefined on a primitive, the primitive inherits the + value of the nearest ancestor prim, if any.""" + ) + rel skel:skeleton ( + doc = """Skeleton to be bound to this prim and its descendents that + possess a mapping and weighting to the joints of the identified + Skeleton.""" + ) +} + +class BlendShape "BlendShape" ( + doc = '''Describes a target blend shape, possibly containing inbetween + shapes. + + See the extended "Blend Shape Schema + documentation for information. + ''' +) +{ + uniform vector3f[] normalOffsets ( + doc = """**Required property**. Normal offsets which, when added to the + base pose, provides the normals of the target shape.""" + ) + uniform vector3f[] offsets ( + doc = """**Required property**. Position offsets which, when added to the + base pose, provides the target shape.""" + ) + uniform int[] pointIndices ( + doc = """**Optional property**. Indices into the original mesh that + correspond to the values in *offsets* and of any inbetween shapes. 
If + authored, the number of elements must be equal to the number of elements + in the *offsets* array.""" + ) +} + diff --git a/blender/lib/usd/usdSkel/resources/plugInfo.json b/blender/lib/usd/usdSkel/resources/plugInfo.json new file mode 100644 index 0000000..ce87f89 --- /dev/null +++ b/blender/lib/usd/usdSkel/resources/plugInfo.json @@ -0,0 +1,91 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "SdfMetadata": { + "weight": { + "appliesTo": [ + "attributes" + ], + "default": 0, + "displayGroup": "BlendShape", + "documentation": "The weight value at which an inbeteen shape is applied.", + "type": "float" + } + }, + "Types": { + "UsdSkelAnimation": { + "alias": { + "UsdSchemaBase": "SkelAnimation" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdSkelBindingAPI": { + "alias": { + "UsdSchemaBase": "SkelBindingAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdSkelBlendShape": { + "alias": { + "UsdSchemaBase": "BlendShape" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdSkelPackedJointAnimation": { + "alias": { + "UsdSchemaBase": "PackedJointAnimation" + }, + "autoGenerated": true, + "bases": [ + "UsdSkelAnimation" + ], + "schemaKind": "concreteTyped" + }, + "UsdSkelRoot": { + "alias": { + "UsdSchemaBase": "SkelRoot" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomBoundable" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + }, + "UsdSkelSkeleton": { + "alias": { + "UsdSchemaBase": "Skeleton" + }, + "autoGenerated": true, + "bases": [ + "UsdGeomBoundable" + ], + "implementsComputeExtent": true, + "schemaKind": "concreteTyped" + } + } + }, + "LibraryPath": "", + "Name": "usdSkel", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdSkel/resources/usdSkel/schema.usda b/blender/lib/usd/usdSkel/resources/usdSkel/schema.usda new file mode 100644 index 0000000..052bad7 --- /dev/null +++ b/blender/lib/usd/usdSkel/resources/usdSkel/schema.usda @@ -0,0 +1,296 @@ +#usda 1.0 +( + """ This file contains a schema for supporting skeletal animations in USD. + """ + subLayers = [ + @usdGeom/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdSkel" + string libraryPath = "pxr/usd/usdSkel" + + dictionary libraryTokens = { + dictionary weight = { + string doc = """UsdSkelInbetweenShape - The weight location at + which the inbetween shape applies.""" + } + } + } +) { +} + + +class SkelRoot "SkelRoot" ( + inherits = + doc = """Boundable prim type used to identify a scope beneath which + skeletally-posed primitives are defined. + + A SkelRoot must be defined at or above a skinned primitive for any skinning + behaviors in UsdSkel. + + See the extended \\ref UsdSkel_SkelRoot "Skel Root Schema" documentation for + more information.""" + customData = { + string className = "Root" + + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + } +) { +} + + +class Skeleton "Skeleton" ( + inherits = + doc = """Describes a skeleton. + + See the extended \\ref UsdSkel_Skeleton "Skeleton Schema" documentation for + more information. 
+ """ + customData = { + dictionary extraPlugInfo = { + bool implementsComputeExtent = true + } + + string extraIncludes = """ +#include "pxr/usd/usdSkel/topology.h" """ + } +) { + uniform token[] joints ( + doc = """An array of path tokens identifying the set of joints that make + up the skeleton, and their order. Each token in the array must be valid + when parsed as an SdfPath. The parent-child relationships of the + corresponding paths determine the parent-child relationships of each + joint. It is not required that the name at the end of each path be + unique, but rather only that the paths themselves be unique.""" + ) + + uniform token[] jointNames ( + doc = """If authored, provides a unique name per joint. This may be + optionally set to provide better names when translating to DCC apps + that require unique joint names.""" + ) + + uniform matrix4d[] bindTransforms ( + doc = """Specifies the bind-pose transforms of each joint in + **world space**, in the ordering imposed by *joints*.""" + ) + uniform matrix4d[] restTransforms ( + doc = """Specifies the rest-pose transforms of each joint in + **local space**, in the ordering imposed by *joints*. This provides + fallback values for joint transforms when a Skeleton either has no + bound animation source, or when that animation source only contains + animation for a subset of a Skeleton's joints.""" + ) +} + + +class SkelAnimation "SkelAnimation" ( + inherits = + doc = """Describes a skel animation, where joint animation is stored in a + vectorized form. + + See the extended \\ref UsdSkel_SkelAnimation "Skel Animation" + documentation for more information. + """ + customData = { + string className = "Animation" + } +) { + uniform token[] joints ( + doc = """Array of tokens identifying which joints this animation's + data applies to. The tokens for joints correspond to the tokens of + Skeleton primitives. The order of the joints as listed here may + vary from the order of joints on the Skeleton itself.""" + ) + + float3[] translations ( + doc = """Joint-local translations of all affected joints. Array length + should match the size of the *joints* attribute.""" + ) + + quatf[] rotations ( + doc = """Joint-local unit quaternion rotations of all affected joints, + in 32-bit precision. Array length should match the size of the + *joints* attribute.""" + ) + + half3[] scales ( + doc = """Joint-local scales of all affected joints, in + 16 bit precision. Array length should match the size of the *joints* + attribute.""" + ) + + uniform token[] blendShapes ( + doc = """Array of tokens identifying which blend shapes this + animation's data applies to. The tokens for blendShapes correspond to + the tokens set in the *skel:blendShapes* binding property of the + UsdSkelBindingAPI.""" + ) + + float[] blendShapeWeights ( + doc = """Array of weight values for each blend shape. Each weight value + is associated with the corresponding blend shape identified within the + *blendShapes* token array, and therefore must have the same length as + *blendShapes.""" + ) +} + + +class PackedJointAnimation "PackedJointAnimation" ( + inherits = + doc = """Deprecated. Please use SkelAnimation instead.""" +) +{ +} + +class "SkelBindingAPI" ( + inherits = + doc = """Provides API for authoring and extracting all the skinning-related + data that lives in the "geometry hierarchy" of prims and models that want + to be skeletally deformed. 
+ + See the extended \\ref UsdSkel_BindingAPI "UsdSkelBindingAPI schema" + documentation for more about bindings and how they apply in a scene graph. + """ + customData = { + string className = "BindingAPI" + string extraIncludes = """ +#include "pxr/base/tf/span.h" +#include "pxr/usd/usdGeom/primvar.h" +#include "pxr/usd/usdSkel/skeleton.h" """ + } +) +{ + rel skel:animationSource ( + customData = { + string apiName = "animationSource" + } + doc = """Animation source to be bound to Skeleton primitives at or + beneath the location at which this property is defined. + """ + ) + + rel skel:skeleton ( + customData = { + string apiName = "skeleton" + } + doc = """Skeleton to be bound to this prim and its descendents that + possess a mapping and weighting to the joints of the identified + Skeleton.""" + ) + + uniform token primvars:skel:skinningMethod = "classicLinear" ( + customData = { + string apiName = "skinningMethod" + } + allowedTokens = ["classicLinear", "dualQuaternion"] + doc = """The skinningMethod specifies the skinning method for the prim.""" + ) + + matrix4d primvars:skel:geomBindTransform ( + customData = { + string apiName = "geomBindTransform" + } + doc = """Encodes the bind-time world space transforms of the prim. + If the transform is identical for a group of gprims that share a common + ancestor, the transform may be authored on the ancestor, to "inherit" + down to all the leaf gprims. If this transform is unset, an identity + transform is used instead.""" + ) + + uniform token[] skel:joints ( + customData = { + string apiName = "joints" + } + doc = """An (optional) array of tokens defining the list of + joints to which jointIndices apply. If not defined, jointIndices applies + to the ordered list of joints defined in the bound Skeleton's *joints* + attribute. If undefined on a primitive, the primitive inherits the + value of the nearest ancestor prim, if any.""" + ) + + int[] primvars:skel:jointIndices ( + customData = { + string apiName = "jointIndices" + } + doc = """Indices into the *joints* attribute of the closest + (in namespace) bound Skeleton that affect each point of a PointBased + gprim. The primvar can have either *constant* or *vertex* interpolation. + This primvar's *elementSize* will determine how many joint influences + apply to each point. Indices must point be valid. Null influences should + be defined by setting values in jointWeights to zero. + See UsdGeomPrimvar for more information on interpolation and + elementSize.""" + ) + + float[] primvars:skel:jointWeights ( + customData = { + string apiName = "jointWeights" + } + doc = """Weights for the joints that affect each point of a PointBased + gprim. The primvar can have either *constant* or *vertex* interpolation. + This primvar's *elementSize* will determine how many joints influences + apply to each point. The length, interpolation, and elementSize of + *jointWeights* must match that of *jointIndices*. See UsdGeomPrimvar + for more information on interpolation and elementSize.""" + ) + + uniform token[] skel:blendShapes ( + customData = { + string apiName = "blendShapes" + } + doc = """An array of tokens defining the order onto which blend shape + weights from an animation source map onto the *skel:blendShapeTargets* + rel of a binding site. If authored, the number of elements must be equal + to the number of targets in the _blendShapeTargets_ rel. 
This property + is not inherited hierarchically, and is expected to be authored directly + on the skinnable primitive to which the blend shapes apply.""" + ) + rel skel:blendShapeTargets ( + customData = { + string apiName= "blendShapeTargets" + } + doc = """Ordered list of all target blend shapes. This property is not + inherited hierarchically, and is expected to be authored directly on + the skinnable primitive to which the the blend shapes apply.""" + ) +} + + +class BlendShape "BlendShape" ( + inherits = + doc = """Describes a target blend shape, possibly containing inbetween + shapes. + + See the extended \\ref UsdSkel_BlendShape "Blend Shape Schema + documentation for information. + """ + customData = { + string extraIncludes = """ +#include "pxr/base/tf/span.h" +#include "pxr/usd/usdSkel/inbetweenShape.h" """ + } +) +{ + uniform vector3f[] offsets ( + doc = """**Required property**. Position offsets which, when added to the + base pose, provides the target shape.""" + ) + + uniform vector3f[] normalOffsets ( + doc = """**Required property**. Normal offsets which, when added to the + base pose, provides the normals of the target shape.""" + ) + + uniform int[] pointIndices ( + doc = """**Optional property**. Indices into the original mesh that + correspond to the values in *offsets* and of any inbetween shapes. If + authored, the number of elements must be equal to the number of elements + in the *offsets* array.""" + ) +} diff --git a/blender/lib/usd/usdSkelImaging/resources/plugInfo.json b/blender/lib/usd/usdSkelImaging/resources/plugInfo.json new file mode 100644 index 0000000..a7e7be1 --- /dev/null +++ b/blender/lib/usd/usdSkelImaging/resources/plugInfo.json @@ -0,0 +1,29 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdSkelImagingSkeletonAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "Skeleton" + }, + "UsdSkelImagingSkelRootAdapter": { + "bases": [ + "UsdImagingPrimAdapter" + ], + "isInternal": true, + "primTypeName": "SkelRoot" + } + } + }, + "LibraryPath": "", + "Name": "usdSkelImaging", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdSkelImaging/resources/shaders/skinning.glslfx b/blender/lib/usd/usdSkelImaging/resources/shaders/skinning.glslfx new file mode 100644 index 0000000..de24a59 --- /dev/null +++ b/blender/lib/usd/usdSkelImaging/resources/shaders/skinning.glslfx @@ -0,0 +1,259 @@ +-- glslfx version 0.1 + +-- configuration +{ + "techniques": { + "default": { + "skinPointsLBSKernel": { + "source": [ "Compute.SkinPointsLBS" ] + }, + "skinPointsDQSKernel": { + "source": [ "Compute.SkinPointsDQS" ] + }, + "skinPointsSimpleKernel": { + "source": [ "Compute.SkinPointsSimple" ] + } + } + } +} + +-- glsl Compute.SkinPointsLBS + +const float EPS = 1e-5; + +void compute(int index) +{ + // model space -> bind space + mat4 geomBindXform = HdGet_geomBindXform(); + + vec3 restP = HdGet_restPoints(index); + + // apply blend shapes + int numBlendShapeOffsetRanges = HdGet_numBlendShapeOffsetRanges(); + if (index < numBlendShapeOffsetRanges) { + ivec2 blendShapeOffsetRange = HdGet_blendShapeOffsetRanges(index); + for (int i = blendShapeOffsetRange.x; i < blendShapeOffsetRange.y; ++i) { + vec4 offset = HdGet_blendShapeOffsets(i); + int shapeIndex = int(offset.w); + float weight = HdGet_blendShapeWeights(shapeIndex); + restP += offset.xyz * weight; + } + } + vec4 initP = geomBindXform * vec4(restP, 1); + + int numInfluencesPerComponent = 
HdGet_numInfluencesPerComponent(); + vec3 p; + if (numInfluencesPerComponent > 0) { + p = vec3(0,0,0); + + bool constantPointInfluence = HdGet_hasConstantInfluences(); + int offset = constantPointInfluence? 0 : numInfluencesPerComponent*index; + + for (int i = 0; i < numInfluencesPerComponent; i++) { + vec2 influence = HdGet_influences(offset + i); + float jointWeight = influence.y; + + if (jointWeight > EPS) { + int jointIdx = int( influence.x ); + mat4 skinningXform = HdGet_skinningXforms(jointIdx); + + p += ((skinningXform * initP) * jointWeight).xyz; + } + } + } else { + p = initP.xyz; + } + + // skel space -> world space -> model space + // XXX: Casts to mat4 below are necessary because the matrices passed + // down use doubles and not floats. + mat4 skelToPrimLocal = mat4( HdGet_primWorldToLocal() ) * + mat4( HdGet_skelLocalToWorld() ); + p = (skelToPrimLocal * vec4(p,1)).xyz; + + HdSet_skinnedPoints(index, p); +} + + +-- glsl Compute.SkinPointsDQS + +const float EPS = 1e-5; +const float NORM_EPS = 1e-10; + +vec4 GetPivotQuaternion(int numInfluencesPerComponent, int offset) +{ + vec4 pivotQuat = vec4(0); + int pivotIdx = -1; + float maxw = -1; + for (int i = 0; i < numInfluencesPerComponent; i++) { + vec2 influence = HdGet_influences(offset + i); + float jointWeight = influence.y; + if (pivotIdx < 0 || maxw < jointWeight) { + int jointIdx = int( influence.x ); + maxw = jointWeight; + pivotIdx = jointIdx; + } + } + if (pivotIdx >= 0) + pivotQuat = HdGet_skinningDualQuats(pivotIdx*2); + + return pivotQuat; +} + +vec3 TransformByQuaternion(vec4 quat, vec3 vec) +{ + // See GfQuat::Transform() for algorithm + + float r1 = quat.w; + vec3 i1 = quat.xyz; + + vec3 i2 = vec3(r1 * vec[0] + (i1[1] * vec[2] - i1[2] * vec[1]), + r1 * vec[1] + (i1[2] * vec[0] - i1[0] * vec[2]), + r1 * vec[2] + (i1[0] * vec[1] - i1[1] * vec[0])); + + return vec3(vec[0] + 2.0 * (i1[1] * i2[2] - i1[2] * i2[1]), + vec[1] + 2.0 * (i1[2] * i2[0] - i1[0] * i2[2]), + vec[2] + 2.0 * (i1[0] * i2[1] - i1[1] * i2[0])); +} + +vec3 GetDualQuaternionTranslation(vec4 real, vec4 dual) +{ + // See GfDualQuat::GetTranslation() for algorithm + + float scale = -2.0; + + float rw = real.w; + vec3 ri = real.xyz; + + float dw = dual.w; + vec3 di = dual.xyz; + + return vec3( (dw*ri[0] - rw*di[0] + di[1]*ri[2] - di[2]*ri[1])*scale, + (dw*ri[1] - rw*di[1] + di[2]*ri[0] - di[0]*ri[2])*scale, + (dw*ri[2] - rw*di[2] + di[0]*ri[1] - di[1]*ri[0])*scale ); +} + +void compute(int index) +{ + // model space -> bind space + mat4 geomBindXform = HdGet_geomBindXform(); + + vec3 restP = HdGet_restPoints(index); + + // apply blend shapes + int numBlendShapeOffsetRanges = HdGet_numBlendShapeOffsetRanges(); + if (index < numBlendShapeOffsetRanges) { + ivec2 blendShapeOffsetRange = HdGet_blendShapeOffsetRanges(index); + for (int i = blendShapeOffsetRange.x; i < blendShapeOffsetRange.y; ++i) { + vec4 offset = HdGet_blendShapeOffsets(i); + int shapeIndex = int(offset.w); + float weight = HdGet_blendShapeWeights(shapeIndex); + restP += offset.xyz * weight; + } + } + vec3 initP = (geomBindXform * vec4(restP, 1)).xyz; + + int numInfluencesPerComponent = HdGet_numInfluencesPerComponent(); + vec3 p; + if (numInfluencesPerComponent > 0) { + +#ifdef HD_HAS_skinningScaleXforms + vec3 scaledP = vec3(0, 0, 0); +#endif + + bool constantPointInfluence = HdGet_hasConstantInfluences(); + int offset = constantPointInfluence ? 
0 : numInfluencesPerComponent*index; + + // find the pivot quaternion + vec4 pivotQuat = GetPivotQuaternion(numInfluencesPerComponent, offset); + + // find the weighted sum dual quaternion + vec4 weightedSumDQReal = vec4(0); + vec4 weightedSumDQDual = vec4(0); + + for (int i = 0; i < numInfluencesPerComponent; i++) { + vec2 influence = HdGet_influences(offset + i); + float jointWeight = influence.y; + + if (jointWeight > EPS) { + int jointIdx = int(influence.x); + +#ifdef HD_HAS_skinningScaleXforms + // Apply scale using LBS, if any of the skinning xforms has scales + mat3 scaleXform = HdGet_skinningScaleXforms(jointIdx); + scaledP += ((scaleXform * initP) * jointWeight); +#endif + + // Apply rotation & translation using DQS + vec4 skinningDQReal = HdGet_skinningDualQuats(jointIdx*2); + vec4 skinningDQDual = HdGet_skinningDualQuats(jointIdx*2+1); + // Flip the dual quaternion, if necessary, to make it + // on the same hemisphere as the pivotQuat. + if (dot(skinningDQReal, pivotQuat) < 0.0) + jointWeight = -jointWeight; + + weightedSumDQReal += (skinningDQReal * jointWeight); + weightedSumDQDual += (skinningDQDual * jointWeight); + } + } + + // normalize weightedSumDQ + float realLength = length(weightedSumDQReal); + if (realLength < NORM_EPS) { + weightedSumDQReal = vec4(0, 0, 0, 1); // identity quaternion + weightedSumDQDual = vec4(0); // zero quaternion + } else { + float inverseRealLength = 1.0 / realLength; + // rotation normalization + weightedSumDQReal *= inverseRealLength; + weightedSumDQDual *= inverseRealLength; + // plucker normalization + weightedSumDQDual -= (dot(weightedSumDQReal, weightedSumDQDual) * weightedSumDQReal); + } + +#ifdef HD_HAS_skinningScaleXforms + // transform scaledP by weightedSumDQ + p = TransformByQuaternion(weightedSumDQReal, scaledP) + + GetDualQuaternionTranslation(weightedSumDQReal, weightedSumDQDual); +#else + // transform initP by weightedSumDQ + p = TransformByQuaternion(weightedSumDQReal, initP) + + GetDualQuaternionTranslation(weightedSumDQReal, weightedSumDQDual); +#endif + } else { + p = initP; + } + + // skel space -> world space -> model space + // XXX: Casts to mat4 below are necessary because the matrices passed + // down use doubles and not floats. + mat4 skelToPrimLocal = mat4(HdGet_primWorldToLocal()) * + mat4(HdGet_skelLocalToWorld()); + p = (skelToPrimLocal * vec4(p,1)).xyz; + + HdSet_skinnedPoints(index, p); +} + + +-- glsl Compute.SkinPointsSimple + +void compute(int index) +{ + // This is simple joint-constraint skinning model. + + mat4 geomBindXform = HdGet_geomBindXform(); + int jointIndex = int( HdGet_influences(index).x ); + mat4 skinningXform = HdGet_skinningXforms(jointIndex); + + // model space -> bind space -> skel space + vec4 p = skinningXform * geomBindXform * vec4(HdGet_restPoints(index), 1); + + // skel space -> world space -> model space + // XXX: Casts to mat4 below are necessary because the matrices passed + // down use doubles and not floats. + mat4 skelToPrimLocal = mat4( HdGet_primWorldToLocal() ) * + mat4( HdGet_skelLocalToWorld() ); + p = skelToPrimLocal * p; + + HdSet_skinnedPoints(index, p.xyz); +} diff --git a/blender/lib/usd/usdUI/resources/generatedSchema.usda b/blender/lib/usd/usdUI/resources/generatedSchema.usda new file mode 100644 index 0000000..37abbaa --- /dev/null +++ b/blender/lib/usd/usdUI/resources/generatedSchema.usda @@ -0,0 +1,124 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." 
+) + +class "NodeGraphNodeAPI" ( + doc = """ + This api helps storing information about nodes in node graphs. + """ +) +{ + uniform color3f ui:nodegraph:node:displayColor ( + doc = """ + This hint defines what tint the node should have in the node graph. + """ + ) + uniform token ui:nodegraph:node:expansionState ( + allowedTokens = ["open", "closed", "minimized"] + doc = """ + The current expansionState of the node in the ui. + 'open' = fully expanded + 'closed' = fully collapsed + 'minimized' = should take the least space possible + """ + ) + uniform asset ui:nodegraph:node:icon ( + doc = """ + This points to an image that should be displayed on the node. It is + intended to be useful for summary visual classification of nodes, rather + than a thumbnail preview of the computed result of the node in some + computational system. + """ + ) + uniform float2 ui:nodegraph:node:pos ( + doc = """ + Declared relative position to the parent in a node graph. + X is the horizontal position. + Y is the vertical position. Higher numbers correspond to lower positions + (coordinates are Qt style, not cartesian). + + These positions are not explicitly meant in pixel space, but rather + assume that the size of a node is approximately 1.0x1.0. Where size-x is + the node width and size-y height of the node. Depending on + graph UI implementation, the size of a node may vary in each direction. + + Example: If a node's width is 300 and it is position is at 1000, we + store for x-position: 1000 * (1.0/300) + """ + ) + uniform float2 ui:nodegraph:node:size ( + doc = """ + Optional size hint for a node in a node graph. + X is the width. + Y is the height. + + This value is optional, because node size is often determined + based on the number of in- and outputs of a node. + """ + ) + uniform int ui:nodegraph:node:stackingOrder ( + doc = """ + This optional value is a useful hint when an application cares about + the visibility of a node and whether each node overlaps another. + + Nodes with lower stacking order values are meant to be drawn below + higher ones. Negative values are meant as background. Positive values + are meant as foreground. + Undefined values should be treated as 0. + + There are no set limits in these values. + """ + ) +} + +class "SceneGraphPrimAPI" ( + doc = """ + Utility schema for display properties of a prim + """ +) +{ + uniform token ui:displayGroup ( + doc = """When publishing a nodegraph or a material, it can be useful to + provide an optional display group, for organizational purposes and + readability. This is because often the usd shading hierarchy is rather + flat while we want to display it in organized groups. + """ + ) + uniform token ui:displayName ( + doc = """When publishing a nodegraph or a material, it can be useful to + provide an optional display name, for readability. + """ + ) +} + +class Backdrop "Backdrop" ( + doc = """Provides a 'group-box' for the purpose of node graph organization. + + Unlike containers, backdrops do not store the Shader nodes inside of them. + Backdrops are an organizational tool that allows Shader nodes to be visually + grouped together in a node-graph UI, but there is no direct relationship + between a Shader node and a Backdrop. + + The guideline for a node-graph UI is that a Shader node is considered part + of a Backdrop when the Backdrop is the smallest Backdrop a Shader node's + bounding-box fits inside. + + Backdrop objects are contained inside a NodeGraph, similar to how Shader + objects are contained inside a NodeGraph. 
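As a rough illustration, the UI hints described here are authored like any other attributes; this is a hedged Python sketch assuming the bundled pxr Python bindings, with hypothetical shader and backdrop paths:

from pxr import Usd, UsdShade, UsdUI, Gf

stage = Usd.Stage.CreateInMemory()
shader = UsdShade.Shader.Define(stage, "/Mat/Preview")            # hypothetical shader node
node = UsdUI.NodeGraphNodeAPI.Apply(shader.GetPrim())
node.CreatePosAttr(Gf.Vec2f(2.0, 1.0))         # ~2 node-widths right, 1 node-height down
node.CreateExpansionStateAttr(UsdUI.Tokens.open)

backdrop = UsdUI.Backdrop.Define(stage, "/Mat/Notes")             # hypothetical backdrop
backdrop.CreateDescriptionAttr("Preview shading network")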
+ + Backdrops have no shading inputs or outputs that influence the rendered + results of a NodeGraph. Therefore they can be safely ignored during import. + + Like Shaders and NodeGraphs, Backdrops subscribe to the NodeGraphNodeAPI to + specify position and size. + """ +) +{ + uniform token ui:description ( + doc = """The text label that is displayed on the backdrop in the node + graph. This help-description explains what the nodes in a backdrop do. + """ + ) +} + diff --git a/blender/lib/usd/usdUI/resources/plugInfo.json b/blender/lib/usd/usdUI/resources/plugInfo.json new file mode 100644 index 0000000..e0031c5 --- /dev/null +++ b/blender/lib/usd/usdUI/resources/plugInfo.json @@ -0,0 +1,48 @@ +# Portions of this file auto-generated by usdGenSchema. +# Edits will survive regeneration except for comments and +# changes to types with autoGenerated=true. +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdUIBackdrop": { + "alias": { + "UsdSchemaBase": "Backdrop" + }, + "autoGenerated": true, + "bases": [ + "UsdTyped" + ], + "schemaKind": "concreteTyped" + }, + "UsdUINodeGraphNodeAPI": { + "alias": { + "UsdSchemaBase": "NodeGraphNodeAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + }, + "UsdUISceneGraphPrimAPI": { + "alias": { + "UsdSchemaBase": "SceneGraphPrimAPI" + }, + "autoGenerated": true, + "bases": [ + "UsdAPISchemaBase" + ], + "schemaKind": "singleApplyAPI" + } + } + }, + "LibraryPath": "", + "Name": "usdUI", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/lib/usd/usdUI/resources/usdUI/schema.usda b/blender/lib/usd/usdUI/resources/usdUI/schema.usda new file mode 100644 index 0000000..56ea190 --- /dev/null +++ b/blender/lib/usd/usdUI/resources/usdUI/schema.usda @@ -0,0 +1,171 @@ +#usda 1.0 +( + "This file describes the USD Shader schemata for code generation." + subLayers = [ + @usd/schema.usda@ + ] +) + +over "GLOBAL" ( + customData = { + string libraryName = "usdUI" + string libraryPath = "pxr/usd/usdUI" + # dictionary libraryTokens = {} + } +) +{ +} + +class "NodeGraphNodeAPI"( + inherits = + doc = """ + This api helps storing information about nodes in node graphs. + """ +) { + uniform float2 ui:nodegraph:node:pos ( + doc = """ + Declared relative position to the parent in a node graph. + X is the horizontal position. + Y is the vertical position. Higher numbers correspond to lower positions + (coordinates are Qt style, not cartesian). + + These positions are not explicitly meant in pixel space, but rather + assume that the size of a node is approximately 1.0x1.0. Where size-x is + the node width and size-y height of the node. Depending on + graph UI implementation, the size of a node may vary in each direction. + + Example: If a node's width is 300 and it is position is at 1000, we + store for x-position: 1000 * (1.0/300) + """ + customData = { + string apiName = "pos" + } + ) + + uniform int ui:nodegraph:node:stackingOrder ( + doc = """ + This optional value is a useful hint when an application cares about + the visibility of a node and whether each node overlaps another. + + Nodes with lower stacking order values are meant to be drawn below + higher ones. Negative values are meant as background. Positive values + are meant as foreground. + Undefined values should be treated as 0. + + There are no set limits in these values. 
+ """ + customData = { + string apiName = "stackingOrder" + } + ) + + uniform color3f ui:nodegraph:node:displayColor ( + doc = """ + This hint defines what tint the node should have in the node graph. + """ + customData = { + string apiName = "displayColor" + } + ) + + uniform asset ui:nodegraph:node:icon ( + doc = """ + This points to an image that should be displayed on the node. It is + intended to be useful for summary visual classification of nodes, rather + than a thumbnail preview of the computed result of the node in some + computational system. + """ + customData = { + string apiName = "icon" + } + ) + + uniform token ui:nodegraph:node:expansionState ( + allowedTokens = ["open", "closed", "minimized"] + doc = """ + The current expansionState of the node in the ui. + 'open' = fully expanded + 'closed' = fully collapsed + 'minimized' = should take the least space possible + """ + customData = { + string apiName = "expansionState" + } + ) + + uniform float2 ui:nodegraph:node:size ( + doc = """ + Optional size hint for a node in a node graph. + X is the width. + Y is the height. + + This value is optional, because node size is often determined + based on the number of in- and outputs of a node. + """ + customData = { + string apiName = "size" + } + ) +} + + +class "SceneGraphPrimAPI"( + inherits = + doc = """ + Utility schema for display properties of a prim + """ +) { + uniform token ui:displayName ( + doc = """When publishing a nodegraph or a material, it can be useful to + provide an optional display name, for readability. + """ + customData = { + string apiName = "displayName" + } + ) + uniform token ui:displayGroup ( + doc = """When publishing a nodegraph or a material, it can be useful to + provide an optional display group, for organizational purposes and + readability. This is because often the usd shading hierarchy is rather + flat while we want to display it in organized groups. + """ + customData = { + string apiName = "displayGroup" + } + ) +} + +class Backdrop "Backdrop" ( + inherits = + doc = """Provides a 'group-box' for the purpose of node graph organization. + + Unlike containers, backdrops do not store the Shader nodes inside of them. + Backdrops are an organizational tool that allows Shader nodes to be visually + grouped together in a node-graph UI, but there is no direct relationship + between a Shader node and a Backdrop. + + The guideline for a node-graph UI is that a Shader node is considered part + of a Backdrop when the Backdrop is the smallest Backdrop a Shader node's + bounding-box fits inside. + + Backdrop objects are contained inside a NodeGraph, similar to how Shader + objects are contained inside a NodeGraph. + + Backdrops have no shading inputs or outputs that influence the rendered + results of a NodeGraph. Therefore they can be safely ignored during import. + + Like Shaders and NodeGraphs, Backdrops subscribe to the NodeGraphNodeAPI to + specify position and size. + """ +) +{ + uniform token ui:description ( + doc = """The text label that is displayed on the backdrop in the node + graph. This help-description explains what the nodes in a backdrop do. + """ + customData = { + string apiName = "description" + } + ) +} + diff --git a/blender/lib/usd/usdVol/resources/generatedSchema.usda b/blender/lib/usd/usdVol/resources/generatedSchema.usda new file mode 100644 index 0000000..44ef806 --- /dev/null +++ b/blender/lib/usd/usdVol/resources/generatedSchema.usda @@ -0,0 +1,483 @@ +#usda 1.0 +( + "WARNING: THIS FILE IS GENERATED BY usdGenSchema. DO NOT EDIT." 
+) + +class Volume "Volume" ( + doc = """A renderable volume primitive. A volume is made up of any number + of FieldBase primitives bound together in this volume. Each + FieldBase primitive is specified as a relationship with a + namespace prefix of \"field\". + + The relationship name is used by the renderer to associate + individual fields with the named input parameters on the volume + shader. Using this indirect approach to connecting fields to + shader parameters (rather than using the field prim's name) + allows a single field to be reused for different shader inputs, or + to be used as different shader parameters when rendering different + Volumes. This means that the name of the field prim is not + relevant to its contribution to the volume prims which refer to + it. Nor does the field prim's location in the scene graph have + any relevance, and Volumes may refer to fields anywhere in the + scene graph. **However**, unless Field prims need to be shared + by multiple Volumes, a Volume's Field prims should be located + under the Volume in namespace, for enhanced organization.""" +) +{ + uniform bool doubleSided = 0 ( + doc = """Although some renderers treat all parametric or polygonal + surfaces as if they were effectively laminae with outward-facing + normals on both sides, some renderers derive significant optimizations + by considering these surfaces to have only a single outward side, + typically determined by control-point winding order and/or + orientation. By doing so they can perform \"backface culling\" to + avoid drawing the many polygons of most closed surfaces that face away + from the viewer. + + However, it is often advantageous to model thin objects such as paper + and cloth as single, open surfaces that must be viewable from both + sides, always. Setting a gprim's doubleSided attribute to + \\c true instructs all renderers to disable optimizations such as + backface culling for the gprim, and attempt (not all renderers are able + to do so, but the USD reference GL renderer always will) to provide + forward-facing normals on each side of the surface for lighting + calculations.""" + ) + float3[] extent ( + doc = """Extent is a three dimensional range measuring the geometric + extent of the authored gprim in its own local space (i.e. its own + transform not applied), without accounting for any shader-induced + displacement. If __any__ extent value has been authored for a given + Boundable, then it should be authored at every timeSample at which + geometry-affecting properties are authored, to ensure correct + evaluation via ComputeExtent(). If __no__ extent value has been + authored, then ComputeExtent() will call the Boundable's registered + ComputeExtentFunction(), which may be expensive, which is why we + strongly encourage proper authoring of extent. + \\sa ComputeExtent() + \\sa \\ref UsdGeom_Boundable_Extent. + + An authored extent on a prim which has children is expected to include + the extent of all children, as they will be pruned from BBox computation + during traversal.""" + ) + uniform token orientation = "rightHanded" ( + allowedTokens = ["rightHanded", "leftHanded"] + doc = """Orientation specifies whether the gprim's surface normal + should be computed using the right hand rule, or the left hand rule. 
+ Please see for a deeper explanation and + generalization of orientation to composed scenes with transformation + hierarchies.""" + ) + color3f[] primvars:displayColor ( + doc = '''It is useful to have an "official" colorSet that can be used + as a display or modeling color, even in the absence of any specified + shader for a gprim. DisplayColor serves this role; because it is a + UsdGeomPrimvar, it can also be used as a gprim override for any shader + that consumes a displayColor parameter.''' + ) + float[] primvars:displayOpacity ( + doc = """Companion to displayColor that specifies opacity, broken + out as an independent attribute rather than an rgba color, both so that + each can be independently overridden, and because shaders rarely consume + rgba parameters.""" + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "FieldBase" ( + doc = "Base class for field primitives." +) +{ + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class "FieldAsset" ( + doc = "Base class for field primitives defined by an external file." +) +{ + token fieldDataType ( + doc = """Token which is used to indicate the data type of an + individual field. Authors use this to tell consumers more + about the field without opening the file on disk. The list of + allowed tokens is specified with the specific asset type. + A missing value is considered an error.""" + ) + int fieldIndex ( + doc = """A file can contain multiple fields with the same + name. This optional attribute is an index used to + disambiguate between these multiple fields with the same + name.""" + ) + token fieldName ( + doc = """Name of an individual field within the file specified by + the filePath attribute.""" + ) + asset filePath ( + doc = """An asset path attribute that points to a file on disk. + For each supported file format, a separate FieldAsset + subclass is required. + + This attribute's value can be animated over time, as most + volume asset formats represent just a single timeSample of + a volume. However, it does not, at this time, support + any pattern substitutions like \"$F\". """ + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. 
This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token vectorDataRoleHint = "None" ( + allowedTokens = ["None", "Point", "Normal", "Vector", "Color"] + doc = """Optional token which is used to indicate the role of a vector + valued field. This can drive the data type in which fields + are made available in a renderer or whether the vector values + are to be transformed.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class Field3DAsset "Field3DAsset" ( + doc = """Field3D field primitive. The FieldAsset filePath attribute must + specify a file in the Field3D format on disk.""" +) +{ + token fieldDataType ( + allowedTokens = ["half", "float", "double", "half3", "float3", "double3"] + doc = """Token which is used to indicate the data type of an + individual field. Authors use this to tell consumers more + about the field without opening the file on disk. The list of + allowed tokens reflects the available choices for Field3d + volumes.""" + ) + int fieldIndex ( + doc = """A file can contain multiple fields with the same + name. This optional attribute is an index used to + disambiguate between these multiple fields with the same + name.""" + ) + token fieldName ( + doc = """Name of an individual field within the file specified by + the filePath attribute.""" + ) + token fieldPurpose ( + doc = """Optional token which can be used to indicate the purpose or + grouping of an individual field. 
Clients which consume Field3D + files should treat this as the Field3D field name.""" + ) + asset filePath ( + doc = """An asset path attribute that points to a file on disk. + For each supported file format, a separate FieldAsset + subclass is required. + + This attribute's value can be animated over time, as most + volume asset formats represent just a single timeSample of + a volume. However, it does not, at this time, support + any pattern substitutions like \"$F\". """ + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token vectorDataRoleHint = "None" ( + allowedTokens = ["None", "Point", "Normal", "Vector", "Color"] + doc = """Optional token which is used to indicate the role of a vector + valued field. This can drive the data type in which fields + are made available in a renderer or whether the vector values + are to be transformed.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. Visibility is + animatable, allowing a sub-tree of geometry to be present for some + segment of a shot, and absent from others; unlike the action of + deactivating geometry prims, invisible geometry is still + available for inspection, for positioning, for defining volumes, etc.''' + ) + uniform token[] xformOpOrder ( + doc = """Encodes the sequence of transformation operations in the + order in which they should be pushed onto a transform stack while + visiting a UsdStage's prims in a graph traversal that will effect + the desired positioning for this prim and its descendant prims. + + You should rarely, if ever, need to manipulate this attribute directly. + It is managed by the AddXformOp(), SetResetXformStack(), and + SetXformOpOrder(), and consulted by GetOrderedXformOps() and + GetLocalTransformation().""" + ) +} + +class OpenVDBAsset "OpenVDBAsset" ( + doc = """OpenVDB field primitive. The FieldAsset filePath attribute must + specify a file in the OpenVDB format on disk.""" +) +{ + token fieldClass ( + allowedTokens = ["levelSet", "fogVolume", "staggered", "unknown"] + doc = """Optional token which can be used to indicate the class of + an individual grid. 
This is a mapping to openvdb::GridClass + where the values are GRID_LEVEL_SET, GRID_FOG_VOLUME, + GRID_STAGGERED, and GRID_UNKNOWN.""" + ) + token fieldDataType ( + allowedTokens = ["half", "float", "double", "int", "uint", "int64", "half2", "float2", "double2", "int2", "half3", "float3", "double3", "int3", "matrix3d", "matrix4d", "quatd", "bool", "mask", "string"] + doc = """Token which is used to indicate the data type of an + individual field. Authors use this to tell consumers more + about the field without opening the file on disk. The list of + allowed tokens reflects the available choices for OpenVDB + volumes.""" + ) + int fieldIndex ( + doc = """A file can contain multiple fields with the same + name. This optional attribute is an index used to + disambiguate between these multiple fields with the same + name.""" + ) + token fieldName ( + doc = """Name of an individual field within the file specified by + the filePath attribute.""" + ) + asset filePath ( + doc = """An asset path attribute that points to a file on disk. + For each supported file format, a separate FieldAsset + subclass is required. + + This attribute's value can be animated over time, as most + volume asset formats represent just a single timeSample of + a volume. However, it does not, at this time, support + any pattern substitutions like \"$F\". """ + ) + rel proxyPrim ( + doc = '''The proxyPrim relationship allows us to link a + prim whose purpose is "render" to its (single target) + purpose="proxy" prim. This is entirely optional, but can be + useful in several scenarios: + + - In a pipeline that does pruning (for complexity management) + by deactivating prims composed from asset references, when we + deactivate a purpose="render" prim, we will be able to discover + and additionally deactivate its associated purpose="proxy" prim, + so that preview renders reflect the pruning accurately. + + - DCC importers may be able to make more aggressive optimizations + for interactive processing and display if they can discover the proxy + for a given render prim. + + - With a little more work, a Hydra-based application will be able + to map a picked proxy prim back to its render geometry for selection. + + \\note It is only valid to author the proxyPrim relationship on + prims whose purpose is "render".''' + ) + uniform token purpose = "default" ( + allowedTokens = ["default", "render", "proxy", "guide"] + doc = """Purpose is a classification of geometry into categories that + can each be independently included or excluded from traversals of prims + on a stage, such as rendering or bounding-box computation traversals. + + See for more detail about how + purpose is computed and used.""" + ) + token vectorDataRoleHint = "None" ( + allowedTokens = ["None", "Point", "Normal", "Vector", "Color"] + doc = """Optional token which is used to indicate the role of a vector + valued field. This can drive the data type in which fields + are made available in a renderer or whether the vector values + are to be transformed.""" + ) + token visibility = "inherited" ( + allowedTokens = ["inherited", "invisible"] + doc = '''Visibility is meant to be the simplest form of "pruning" + visibility that is supported by most DCC apps. 
Visibility is
+        animatable, allowing a sub-tree of geometry to be present for some
+        segment of a shot, and absent from others; unlike the action of
+        deactivating geometry prims, invisible geometry is still
+        available for inspection, for positioning, for defining volumes, etc.'''
+    )
+    uniform token[] xformOpOrder (
+        doc = """Encodes the sequence of transformation operations in the
+        order in which they should be pushed onto a transform stack while
+        visiting a UsdStage's prims in a graph traversal that will effect
+        the desired positioning for this prim and its descendant prims.
+
+        You should rarely, if ever, need to manipulate this attribute directly.
+        It is managed by the AddXformOp(), SetResetXformStack(), and
+        SetXformOpOrder(), and consulted by GetOrderedXformOps() and
+        GetLocalTransformation()."""
+    )
+}
+
diff --git a/blender/lib/usd/usdVol/resources/plugInfo.json b/blender/lib/usd/usdVol/resources/plugInfo.json
new file mode 100644
index 0000000..9a0c90a
--- /dev/null
+++ b/blender/lib/usd/usdVol/resources/plugInfo.json
@@ -0,0 +1,68 @@
+# Portions of this file auto-generated by usdGenSchema.
+# Edits will survive regeneration except for comments and
+# changes to types with autoGenerated=true.
+{
+    "Plugins": [
+        {
+            "Info": {
+                "Types": {
+                    "UsdVolField3DAsset": {
+                        "alias": {
+                            "UsdSchemaBase": "Field3DAsset"
+                        },
+                        "autoGenerated": true,
+                        "bases": [
+                            "UsdVolFieldAsset"
+                        ],
+                        "schemaKind": "concreteTyped"
+                    },
+                    "UsdVolFieldAsset": {
+                        "alias": {
+                            "UsdSchemaBase": "FieldAsset"
+                        },
+                        "autoGenerated": true,
+                        "bases": [
+                            "UsdVolFieldBase"
+                        ],
+                        "schemaKind": "abstractTyped"
+                    },
+                    "UsdVolFieldBase": {
+                        "alias": {
+                            "UsdSchemaBase": "FieldBase"
+                        },
+                        "autoGenerated": true,
+                        "bases": [
+                            "UsdGeomXformable"
+                        ],
+                        "schemaKind": "abstractTyped"
+                    },
+                    "UsdVolOpenVDBAsset": {
+                        "alias": {
+                            "UsdSchemaBase": "OpenVDBAsset"
+                        },
+                        "autoGenerated": true,
+                        "bases": [
+                            "UsdVolFieldAsset"
+                        ],
+                        "schemaKind": "concreteTyped"
+                    },
+                    "UsdVolVolume": {
+                        "alias": {
+                            "UsdSchemaBase": "Volume"
+                        },
+                        "autoGenerated": true,
+                        "bases": [
+                            "UsdGeomGprim"
+                        ],
+                        "schemaKind": "concreteTyped"
+                    }
+                }
+            },
+            "LibraryPath": "",
+            "Name": "usdVol",
+            "ResourcePath": "resources",
+            "Root": "..",
+            "Type": "library"
+        }
+    ]
+}
diff --git a/blender/lib/usd/usdVol/resources/usdVol/schema.usda b/blender/lib/usd/usdVol/resources/usdVol/schema.usda
new file mode 100644
index 0000000..b9108e8
--- /dev/null
+++ b/blender/lib/usd/usdVol/resources/usdVol/schema.usda
@@ -0,0 +1,144 @@
+#usda 1.0
+
+(
+    subLayers = [
+        @usdGeom/schema.usda@
+    ]
+)
+
+over "GLOBAL" (
+    customData = {
+        string libraryName = "usdVol"
+        string libraryPath = "pxr/usd/usdVol"
+        dictionary libraryTokens = {
+            dictionary field = {
+                string doc = """This is the namespace prefix used to
+                specify the fields that make up a volume primitive."""
+            }
+        }
+    }
+)
+{
+}
+
+class Volume "Volume" (
+    inherits = </Gprim>
+    doc = """A renderable volume primitive. A volume is made up of any number
+    of FieldBase primitives bound together in this volume. Each
+    FieldBase primitive is specified as a relationship with a
+    namespace prefix of "field".
+
+    The relationship name is used by the renderer to associate
+    individual fields with the named input parameters on the volume
+    shader. Using this indirect approach to connecting fields to
+    shader parameters (rather than using the field prim's name)
+    allows a single field to be reused for different shader inputs, or
+    to be used as different shader parameters when rendering different
+    Volumes.
This means that the name of the field prim is not
+    relevant to its contribution to the volume prims which refer to
+    it. Nor does the field prim's location in the scene graph have
+    any relevance, and Volumes may refer to fields anywhere in the
+    scene graph. **However**, unless Field prims need to be shared
+    by multiple Volumes, a Volume's Field prims should be located
+    under the Volume in namespace, for enhanced organization."""
+)
+{
+}
+
+class "FieldBase" (
+    inherits = </Xformable>
+    doc = """Base class for field primitives."""
+)
+{
+}
+
+class "FieldAsset" (
+    doc = "Base class for field primitives defined by an external file."
+    inherits = </FieldBase>
+)
+{
+    asset filePath (
+        doc = """An asset path attribute that points to a file on disk.
+        For each supported file format, a separate FieldAsset
+        subclass is required.
+
+        This attribute's value can be animated over time, as most
+        volume asset formats represent just a single timeSample of
+        a volume. However, it does not, at this time, support
+        any pattern substitutions like \"$F\". """
+    )
+    token fieldName (
+        doc = """Name of an individual field within the file specified by
+        the filePath attribute."""
+    )
+    int fieldIndex (
+        doc = """A file can contain multiple fields with the same
+        name. This optional attribute is an index used to
+        disambiguate between these multiple fields with the same
+        name."""
+    )
+    token fieldDataType (
+        doc = """Token which is used to indicate the data type of an
+        individual field. Authors use this to tell consumers more
+        about the field without opening the file on disk. The list of
+        allowed tokens is specified with the specific asset type.
+        A missing value is considered an error."""
+    )
+    token vectorDataRoleHint = "None" (
+        allowedTokens = ["None", "Point", "Normal", "Vector", "Color"]
+        doc = """Optional token which is used to indicate the role of a vector
+        valued field. This can drive the data type in which fields
+        are made available in a renderer or whether the vector values
+        are to be transformed."""
+    )
+}
+
+class Field3DAsset "Field3DAsset" (
+    doc = """Field3D field primitive. The FieldAsset filePath attribute must
+    specify a file in the Field3D format on disk."""
+    inherits = </FieldAsset>
+)
+{
+    token fieldDataType (
+        allowedTokens = ["half", "float", "double",
+                         "half3", "float3", "double3"]
+        doc = """Token which is used to indicate the data type of an
+        individual field. Authors use this to tell consumers more
+        about the field without opening the file on disk. The list of
+        allowed tokens reflects the available choices for Field3d
+        volumes."""
+    )
+    token fieldPurpose (
+        doc = """Optional token which can be used to indicate the purpose or
+        grouping of an individual field. Clients which consume Field3D
+        files should treat this as the Field3D field \\em name."""
+    )
+}
+
+class OpenVDBAsset "OpenVDBAsset" (
+    doc = """OpenVDB field primitive. The FieldAsset filePath attribute must
+    specify a file in the OpenVDB format on disk."""
+    inherits = </FieldAsset>
+)
+{
+    token fieldDataType (
+        allowedTokens = ["half", "float", "double", "int", "uint", "int64",
+                         "half2", "float2", "double2", "int2",
+                         "half3", "float3", "double3", "int3",
+                         "matrix3d", "matrix4d", "quatd",
+                         "bool", "mask", "string"]
+        doc = """Token which is used to indicate the data type of an
+        individual field. Authors use this to tell consumers more
+        about the field without opening the file on disk.
The list of + allowed tokens reflects the available choices for OpenVDB + volumes.""" + ) + token fieldClass ( + allowedTokens = ["levelSet", "fogVolume", "staggered", "unknown"] + doc = """Optional token which can be used to indicate the class of + an individual grid. This is a mapping to openvdb::GridClass + where the values are GRID_LEVEL_SET, GRID_FOG_VOLUME, + GRID_STAGGERED, and GRID_UNKNOWN.""" + ) +} + diff --git a/blender/lib/usd/usdVolImaging/resources/plugInfo.json b/blender/lib/usd/usdVolImaging/resources/plugInfo.json new file mode 100644 index 0000000..8a216a9 --- /dev/null +++ b/blender/lib/usd/usdVolImaging/resources/plugInfo.json @@ -0,0 +1,29 @@ +{ + "Plugins": [ + { + "Info": { + "Types": { + "UsdImagingOpenVDBAssetAdapter": { + "bases": [ + "UsdImagingFieldAdapter" + ], + "isInternal": true, + "primTypeName": "OpenVDBAsset" + }, + "UsdImagingField3DAssetAdapter": { + "bases": [ + "UsdImagingFieldAdapter" + ], + "isInternal": true, + "primTypeName": "Field3DAsset" + } + } + }, + "LibraryPath": "", + "Name": "usdVolImaging", + "ResourcePath": "resources", + "Root": "..", + "Type": "library" + } + ] +} diff --git a/blender/license/Apache-2.0.txt b/blender/license/Apache-2.0.txt new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/blender/license/Apache-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/blender/license/BSD-3-Clause.txt b/blender/license/BSD-3-Clause.txt new file mode 100644 index 0000000..645c959 --- /dev/null +++ b/blender/license/BSD-3-Clause.txt @@ -0,0 +1,26 @@ +The 3-Clause BSD License +------------------------ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Google Inc. nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/blender/license/BSL-1.0.txt b/blender/license/BSL-1.0.txt new file mode 100644 index 0000000..36b7cd9 --- /dev/null +++ b/blender/license/BSL-1.0.txt @@ -0,0 +1,23 @@ +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/blender/license/FTL.txt b/blender/license/FTL.txt new file mode 100644 index 0000000..ec866eb --- /dev/null +++ b/blender/license/FTL.txt @@ -0,0 +1,166 @@ +The FreeType Project LICENSE +---------------------------- + + 2006-Jan-27 + + Copyright 1996-2002, 2006 by + David Turner, Robert Wilhelm, and Werner Lemberg + + + +Introduction +============ + + The FreeType Project is distributed in several archive packages; + some of them may contain, in addition to the FreeType font engine, + various tools and contributions which rely on, or relate to, the + FreeType Project. + + This license applies to all files found in such packages, and + which do not fall under their own explicit license. The license + affects thus the FreeType font engine, the test programs, + documentation and makefiles, at the very least. + + This license was inspired by the BSD, Artistic, and IJG + (Independent JPEG Group) licenses, which all encourage inclusion + and use of free software in commercial and freeware products + alike. As a consequence, its main points are that: + + o We don't promise that this software works. However, we will be + interested in any kind of bug reports. 
(`as is' distribution) + + o You can use this software for whatever you want, in parts or + full form, without having to pay us. (`royalty-free' usage) + + o You may not pretend that you wrote this software. If you use + it, or only parts of it, in a program, you must acknowledge + somewhere in your documentation that you have used the + FreeType code. (`credits') + + We specifically permit and encourage the inclusion of this + software, with or without modifications, in commercial products. + We disclaim all warranties covering The FreeType Project and + assume no liability related to The FreeType Project. + + + Finally, many people asked us for a preferred form for a + credit/disclaimer to use in compliance with this license. We thus + encourage you to use the following text: + + """ + Portions of this software are copyright © <year> The FreeType + Project (www.freetype.org). All rights reserved. + """ + + Please replace <year> with the value from the FreeType version you + actually use. + + +Legal Terms +=========== + +0. Definitions +-------------- + + Throughout this license, the terms `package', `FreeType Project', + and `FreeType archive' refer to the set of files originally + distributed by the authors (David Turner, Robert Wilhelm, and + Werner Lemberg) as the `FreeType Project', be they named as alpha, + beta or final release. + + `You' refers to the licensee, or person using the project, where + `using' is a generic term including compiling the project's source + code as well as linking it to form a `program' or `executable'. + This program is referred to as `a program using the FreeType + engine'. + + This license applies to all files distributed in the original + FreeType Project, including all source code, binaries and + documentation, unless otherwise stated in the file in its + original, unmodified form as distributed in the original archive. + If you are unsure whether or not a particular file is covered by + this license, you must contact us to verify this. + + The FreeType Project is copyright (C) 1996-2000 by David Turner, + Robert Wilhelm, and Werner Lemberg. All rights reserved except as + specified below. + +1. No Warranty +-------------- + + THE FREETYPE PROJECT IS PROVIDED `AS IS' WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY DAMAGES CAUSED BY THE USE OR THE INABILITY TO + USE, OF THE FREETYPE PROJECT. + +2. Redistribution +----------------- + + This license grants a worldwide, royalty-free, perpetual and + irrevocable right and license to use, execute, perform, compile, + display, copy, create derivative works of, distribute and + sublicense the FreeType Project (in both source and object code + forms) and derivative works thereof for any purpose; and to + authorize others to exercise some or all of the rights granted + herein, subject to the following conditions: + + o Redistribution of source code must retain this license file + (`FTL.TXT') unaltered; any additions, deletions or changes to + the original files must be clearly indicated in accompanying + documentation. The copyright notices of the unaltered, + original files must be preserved in all copies of source + files. + + o Redistribution in binary form must provide a disclaimer that + states that the software is based in part of the work of the + FreeType Team, in the distribution documentation. 
We also + encourage you to put an URL to the FreeType web page in your + documentation, though this isn't mandatory. + + These conditions apply to any software derived from or based on + the FreeType Project, not just the unmodified files. If you use + our work, you must acknowledge us. However, no fee need be paid + to us. + +3. Advertising +-------------- + + Neither the FreeType authors and contributors nor you shall use + the name of the other for commercial, advertising, or promotional + purposes without specific prior written permission. + + We suggest, but do not require, that you use one or more of the + following phrases to refer to this software in your documentation + or advertising materials: `FreeType Project', `FreeType Engine', + `FreeType library', or `FreeType Distribution'. + + As you have not signed this license, you are not required to + accept it. However, as the FreeType Project is copyrighted + material, only this license, or another one contracted with the + authors, grants you the right to use, distribute, and modify it. + Therefore, by using, distributing, or modifying the FreeType + Project, you indicate that you understand and accept all the terms + of this license. + +4. Contacts +----------- + + There are two mailing lists related to FreeType: + + o freetype@nongnu.org + + Discusses general use and applications of FreeType, as well as + future and wanted additions to the library and distribution. + If you are looking for support, start in this list if you + haven't found anything to help you in the documentation. + + o freetype-devel@nongnu.org + + Discusses bugs, as well as engine internals, design issues, + specific licenses, porting, etc. + + Our home page can be found at + + https://www.freetype.org diff --git a/blender/license/GPL-3.0.txt b/blender/license/GPL-3.0.txt new file mode 100644 index 0000000..94a9ed0 --- /dev/null +++ b/blender/license/GPL-3.0.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. 
+ + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/blender/license/GPL-license.txt b/blender/license/GPL-license.txt
new file mode 100644
index 0000000..e8c0353
--- /dev/null
+++ b/blender/license/GPL-license.txt
@@ -0,0 +1,340 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+ + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. 
+ +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. 
(This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. 
+ +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
diff --git a/blender/license/MIT.txt b/blender/license/MIT.txt new file mode 100644 index 0000000..328dae3 --- /dev/null +++ b/blender/license/MIT.txt @@ -0,0 +1,20 @@ +The MIT License +--------------- + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/blender/license/OpenColorIO.txt b/blender/license/OpenColorIO.txt new file mode 100644 index 0000000..3dfcc78 --- /dev/null +++ b/blender/license/OpenColorIO.txt @@ -0,0 +1,27 @@ +Copyright (c) 2003-2010 Sony Pictures Imageworks Inc., et al. +All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +* Neither the name of Sony Pictures Imageworks nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/blender/license/OpenSSL.txt b/blender/license/OpenSSL.txt new file mode 100644 index 0000000..9601ab4 --- /dev/null +++ b/blender/license/OpenSSL.txt @@ -0,0 +1,125 @@ + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a double license, i.e. both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. 
+ + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. 
+ * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + diff --git a/blender/license/Python.txt b/blender/license/Python.txt new file mode 100644 index 0000000..b3e8fb5 --- /dev/null +++ b/blender/license/Python.txt @@ -0,0 +1,49 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python +alone or in any derivative version, provided, however, that PSF's +License Agreement and PSF's notice of copyright, i.e., "Copyright (c) +2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights +Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + diff --git a/blender/license/THIRD-PARTY-LICENSES.txt b/blender/license/THIRD-PARTY-LICENSES.txt new file mode 100644 index 0000000..e43c3f1 --- /dev/null +++ b/blender/license/THIRD-PARTY-LICENSES.txt @@ -0,0 +1,4709 @@ +** Blosc; version 1.21.1 -- https://github.com/Blosc/c-blosc +For Blosc - A blocking, shuffling and lossless compression library + +Copyright (C) 2009-2018 Francesc Alted +Copyright (C) 2019-present Blosc Development team + +Copyright (C) 2006 by Rob Landley + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+ +------ + +** Audaspace; version 1.3.0 -- https://audaspace.github.io/ +** Cuda Wrangler; version cbf465b -- https://github.com/CudaWrangler/cuew +** Draco; version 1.3.6 -- https://google.github.io/draco/ +** Embree; version 4.1.0 -- https://github.com/embree/embree +** Intel(R) oneAPI DPC++ compiler; version 2022-12 -- +https://github.com/intel/llvm#oneapi-dpc-compiler +** Intel® Open Path Guiding Library; version 0.5.0 -- http://www.openpgl.org/ +** Mantaflow; version 0.13 -- http://mantaflow.com/ +** materialX; version 1.38.6 -- +https://github.com/AcademySoftwareFoundation/MaterialX +** meson; version 0.63 -- https://github.com/mesonbuild/meson +** oneAPI Threading Building Block; version 2020_U3 -- +https://software.intel.com/en-us/oneapi/onetbb +** OpenCL Wrangler; version 27a6867 -- https://github.com/OpenCLWrangler/clew +** OpenImageDenoise; version 1.4.3 -- https://www.openimagedenoise.org/ +** OpenSSL; version 3.0.9 -- https://www.openssl.org/ +** OpenXR SDK; version 1.0.17 -- https://khronos.org/openxr +** RangeTree; version 40ebed8aa209 -- https://github.com/ideasman42/rangetree-c +** SDL Extension Wrangler; version 15edf8e -- +https://github.com/SDLWrangler/sdlew +** ShaderC; version 2022.3 -- https://github.com/google/shaderc +** SYCL Unified Runtime ; version fd711c920acc4434cb52ff18b078c082d9d7f44d -- +https://github.com/oneapi-src/unified-runtime +** Vulkan Loader; version 1.2.198 -- +https://github.com/KhronosGroup/Vulkan-Loader + +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND +DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, and + distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by the + copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all other + entities that control, are controlled by, or are under common control + with that entity. For the purposes of this definition, "control" means + (i) the power, direct or indirect, to cause the direction or management + of such entity, whether by contract or otherwise, or (ii) ownership of + fifty percent (50%) or more of the outstanding shares, or (iii) + beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity exercising + permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation source, + and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but not limited + to compiled object code, generated documentation, and conversions to + other media types. + + "Work" shall mean the work of authorship, whether in Source or Object + form, made available under the License, as indicated by a copyright + notice that is included in or attached to the work (an example is + provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object form, + that is based on (or derived from) the Work and for which the editorial + revisions, annotations, elaborations, or other modifications represent, + as a whole, an original work of authorship. 
For the purposes of this + License, Derivative Works shall not include works that remain separable + from, or merely link (or bind by name) to the interfaces of, the Work and + Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including the original + version of the Work and any modifications or additions to that Work or + Derivative Works thereof, that is intentionally submitted to Licensor for + inclusion in the Work by the copyright owner or by an individual or Legal + Entity authorized to submit on behalf of the copyright owner. For the + purposes of this definition, "submitted" means any form of electronic, + verbal, or written communication sent to the Licensor or its + representatives, including but not limited to communication on electronic + mailing lists, source code control systems, and issue tracking systems + that are managed by, or on behalf of, the Licensor for the purpose of + discussing and improving the Work, but excluding communication that is + conspicuously marked or otherwise designated in writing by the copyright + owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity on + behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable copyright license to + reproduce, prepare Derivative Works of, publicly display, publicly perform, + sublicense, and distribute the Work and such Derivative Works in Source or + Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable (except as stated in + this section) patent license to make, have made, use, offer to sell, sell, + import, and otherwise transfer the Work, where such license applies only to + those patent claims licensable by such Contributor that are necessarily + infringed by their Contribution(s) alone or by combination of their + Contribution(s) with the Work to which such Contribution(s) was submitted. + If You institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a + Contribution incorporated within the Work constitutes direct or contributory + patent infringement, then any patent licenses granted to You under this + License for that Work shall terminate as of the date such litigation is + filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the Work or + Derivative Works thereof in any medium, with or without modifications, and + in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a + copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating + that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You + distribute, all copyright, patent, trademark, and attribution notices + from the Source form of the Work, excluding those notices that do not + pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must include + a readable copy of the attribution notices contained within such NOTICE + file, excluding those notices that do not pertain to any part of the + Derivative Works, in at least one of the following places: within a + NOTICE text file distributed as part of the Derivative Works; within the + Source form or documentation, if provided along with the Derivative + Works; or, within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents of the + NOTICE file are for informational purposes only and do not modify the + License. You may add Your own attribution notices within Derivative Works + that You distribute, alongside or as an addendum to the NOTICE text from + the Work, provided that such additional attribution notices cannot be + construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may + provide additional or different license terms and conditions for use, + reproduction, or distribution of Your modifications, or for any such + Derivative Works as a whole, provided Your use, reproduction, and + distribution of the Work otherwise complies with the conditions stated in + this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, any + Contribution intentionally submitted for inclusion in the Work by You to the + Licensor shall be under the terms and conditions of this License, without + any additional terms or conditions. Notwithstanding the above, nothing + herein shall supersede or modify the terms of any separate license agreement + you may have executed with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, except + as required for reasonable and customary use in describing the origin of the + Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in + writing, Licensor provides the Work (and each Contributor provides its + Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied, including, without limitation, any + warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or + FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining + the appropriateness of using or redistributing the Work and assume any risks + associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, whether + in tort (including negligence), contract, or otherwise, unless required by + applicable law (such as deliberate and grossly negligent acts) or agreed to + in writing, shall any Contributor be liable to You for damages, including + any direct, indirect, special, incidental, or consequential damages of any + character arising as a result of this License or out of the use or inability + to use the Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all other + commercial damages or losses), even if such Contributor has been advised of + the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing the Work + or Derivative Works thereof, You may choose to offer, and charge a fee for, + acceptance of support, warranty, indemnity, or other liability obligations + and/or rights consistent with this License. However, in accepting such + obligations, You may act only on Your own behalf and on Your sole + responsibility, not on behalf of any other Contributor, and only if You + agree to indemnify, defend, and hold each Contributor harmless for any + liability incurred by, or claims asserted against, such Contributor by + reason of your accepting any such warranty or additional liability. END OF + TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification +within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); + +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software + +distributed under the License is distributed on an "AS IS" BASIS, + +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +See the License for the specific language governing permissions and + +limitations under the License. + +* For Audaspace see also this required NOTICE: + Copyright © 2009-2020 Jörg Müller. All rights reserved. +* For Cuda Wrangler see also this required NOTICE: + Copyright 2011-2014 Blender Foundation +* For Draco see also this required NOTICE: + Copyright 2018 The Draco Authors +* For Embree see also this required NOTICE: + Copyright 2009-2020 Intel Corporation +* For Intel(R) oneAPI DPC++ compiler see also this required NOTICE: + Copyright (C) 2021 Intel Corporation +* For Intel® Open Path Guiding Library see also this required NOTICE: + Copyright 2020 Intel Corporation. 
+* For Mantaflow see also this required NOTICE: + MantaFlow fluid solver framework + Copyright 2011 Tobias Pfaff, Nils Thuerey +* For materialX see also this required NOTICE: + Copyright Contributors to the MaterialX Project +* For meson see also this required NOTICE: + Jussi Pakkanen + https://github.com/mesonbuild/meson/blob/master/CODEOWNERS +* For oneAPI Threading Building Block see also this required NOTICE: + Copyright (c) 2005-2020 Intel Corporation +* For OpenCL Wrangler see also this required NOTICE: + Copyright (c) 2009 Organic Vectory B.V. + Written by George van Venrooij +* For OpenImageDenoise see also this required NOTICE: + Copyright 2009-2020 Intel Corporation +* For OpenSSL see also this required NOTICE: + Copyright (c) 1998-2023 The OpenSSL Project Authors + Copyright (c) 1995-1998 Eric A. Young, Tim J. Hudson + + All rights reserved. +* For OpenXR SDK see also this required NOTICE: + Copyright (c) 2017-2020 The Khronos Group Inc. + Copyright (c) 2017-2019 Valve Corporation + Copyright (c) 2017-2019 LunarG, Inc. + Copyright (c) 2019 Collabora, Ltd. +* For RangeTree see also this required NOTICE: + Copyright (c) 2016, Campbell Barton. +* For SDL Extension Wrangler see also this required NOTICE: + Copyright 2014 Blender Foundation +* For ShaderC see also this required NOTICE: + Copyright 2015 The Shaderc Authors. All rights reserved. +* For SYCL Unified Runtime see also this required NOTICE: + Copyright (C) 2022-2023 Intel Corporation +* For Vulkan Loader see also this required NOTICE: + Copyright (c) 2019 The Khronos Group Inc. + Copyright (c) 2019 Valve Corporation + Copyright (c) 2019 LunarG, Inc. + Copyright (c) 2019 Google Inc. + +------ + +** pybind11; version 2.10.1 -- https://github.com/pybind/pybind11 +Copyright (c) 2016 Wenzel Jakob , All rights reserved. + +Copyright (c) 2016 Wenzel Jakob , All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Please also refer to the file .github/CONTRIBUTING.md, which clarifies +licensing of +external contributions to this project including patches, pull requests, etc. 
+ +------ + +** libAOM; version 3.4.0 -- https://aomedia.googlesource.com/aom/ +Copyright (c) 2016, Alliance for Open Media. All rights reserved. +** NASM; version 2.15.02 -- https://www.nasm.us/ +Copyright 1996-2010 the NASM Authors - All rights reserved. +** OpenJPEG; version 2.5.0 -- https://github.com/uclouvain/openjpeg +Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium +Copyright (c) 2002-2014, Professor Benoit Macq +Copyright (c) 2003-2014, Antonin Descampe +Copyright (c) 2003-2009, Francois-Olivier Devaux +Copyright (c) 2005, Herve Drolon, FreeImage Team +Copyright (c) 2002-2003, Yannick Verschueren +Copyright (c) 2001-2003, David Janssens +Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France +Copyright (c) 2012, CS Systemes d'Information, France + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** Ceres Solver; version 2.0.0 -- http://ceres-solver.org/ +Ceres Solver - A fast non-linear least squares minimizer +Copyright 2016 Google Inc. All rights reserved. +** Curve-Fit-nD; version ddcd5bd -- https://github.com/ideasman42/curve-fit-nd +Copyright (c) 2016, DWANGO Co., Ltd. +Copyright (c) 2016, Campbell Barton +All rights reserved. +** Google C++ Testing Framework; version 1.10.0 -- +https://github.com/google/googletest +Copyright 2007, Google Inc. +All rights reserved. +** Google Flags; version 2.2.1 -- https://github.com/gflags/gflags +Copyright (c) 1999, Google Inc. +All rights reserved. +** Google Logging; version 4.4.0 -- https://github.com/google/glog +Copyright (c) 2006, Google Inc. +All rights reserved. +** Imath; version 3.1.7 -- https://github.com/AcademySoftwareFoundation/Imath +Copyright Contributors to the OpenEXR Project. All rights reserved. +** ISPC; version 1.17.0 -- https://github.com/ispc/ispc +Copyright Intel Corporation +All rights reserved. +** NumPy; version 1.23.5 -- https://numpy.org/ +Copyright (c) 2005-2022, NumPy Developers. +All rights reserved. +** Ogg; version 1.3.5 -- https://www.xiph.org/ogg/ +COPYRIGHT (C) 1994-2019 by the Xiph.Org Foundation https://www.xiph.org/ +** Open Shading Language; version +1.13-dev-1a7670600c8b08c2443a78d03c8c27e9a1149140 -- +https://github.com/imageworks/OpenShadingLanguage +Copyright Contributors to the Open Shading Language project. 
+** OpenColorIO; version 2.2.0 -- +https://github.com/AcademySoftwareFoundation/OpenColorIO +Copyright Contributors to the OpenColorIO Project. +** OpenEXR; version 3.1.7 -- +https://github.com/AcademySoftwareFoundation/openexr +Copyright Contributors to the OpenEXR Project. All rights reserved. +** OpenImageIO; version 2.4.11.0 -- http://www.openimageio.org +Copyright (c) 2008-present by Contributors to the OpenImageIO project. All +Rights Reserved. +** Pystring; version 1.1.3 -- https://github.com/imageworks/pystring +Copyright (c) 2008-2010, Sony Pictures Imageworks Inc +All rights reserved. +** Vorbis; version 1.3.7 -- https://xiph.org/vorbis/ +Copyright (c) 2002-2020 Xiph.org Foundation +** VPX; version 1.11.0 -- https://github.com/webmproject/libvpx +Copyright (c) 2010, The WebM Project authors. All rights reserved. +** WebP; version 1.2.2 -- https://github.com/webmproject/libwebp +Copyright (c) 2010, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** AutoPackage; version 1.0 -- http://autopackage.org +BinReloc - a library for creating relocatable executables +Written by: Hongli Lai +** LZMA SDK; version 5.2.5 -- https://www.7-zip.org/sdk.html +LZMA SDK: Public Domain + +Creative Commons Legal Code + +CC0 1.0 Universal CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT +PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN +ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN +"AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE USE OF THIS +DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER, AND DISCLAIMS +LIABILITY FOR DAMAGES RESULTING FROM THE USE OF THIS DOCUMENT OR THE +INFORMATION OR WORKS PROVIDED HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator and +subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). 
+ +Certain owners wish to permanently relinquish those rights to a Work for the +purpose of contributing to a commons of creative, cultural and scientific works +("Commons") that the public can reliably and without fear of later claims of +infringement build upon, modify, incorporate in other works, reuse and +redistribute as freely as possible in any form whatsoever and for any purposes, +including without limitation commercial purposes. These owners may contribute +to the Commons to promote the ideal of a free culture and the further +production of creative, cultural and scientific works, or to gain reputation or +greater distribution for their Work in part through the use and efforts of +others. + +For these and/or other purposes and motivations, and without any expectation of +additional consideration or compensation, the person associating CC0 with a +Work (the "Affirmer"), to the extent that he or she is an owner of Copyright +and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and +publicly distribute the Work under its terms, with knowledge of his or her +Copyright and Related Rights in the Work and the meaning and intended legal +effect of CC0 on those rights. + + 1. Copyright and Related Rights. A Work made available under CC0 may be + protected by copyright and related or neighboring rights ("Copyright and + Related Rights"). Copyright and Related Rights include, but are not limited + to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + + ii. moral rights retained by the original author(s) and/or performer(s); + + iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation thereof, + including any amended or successor version of such directive); and + + vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national implementations + thereof. + + 2. Waiver. To the greatest extent permitted by, but not in contravention of, + applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and + unconditionally waives, abandons, and surrenders all of Affirmer's Copyright + and Related Rights and associated claims and causes of action, whether now + known or unknown (including existing as well as future claims and causes of + action), in the Work (i) in all territories worldwide, (ii) for the maximum + duration provided by applicable law or treaty (including future time + extensions), (iii) in any current or future medium and for any number of + copies, and (iv) for any purpose whatsoever, including without limitation + commercial, advertising or promotional purposes (the "Waiver"). 
Affirmer + makes the Waiver for the benefit of each member of the public at large and + to the detriment of Affirmer's heirs and successors, fully intending that + such Waiver shall not be subject to revocation, rescission, cancellation, + termination, or any other legal or equitable action to disrupt the quiet + enjoyment of the Work by the public as contemplated by Affirmer's express + Statement of Purpose. + + 3. Public License Fallback. Should any part of the Waiver for any reason be + judged legally invalid or ineffective under applicable law, then the Waiver + shall be preserved to the maximum extent permitted taking into account + Affirmer's express Statement of Purpose. In addition, to the extent the + Waiver is so judged Affirmer hereby grants to each affected person a + royalty-free, non transferable, non sublicensable, non exclusive, + irrevocable and unconditional license to exercise Affirmer's Copyright and + Related Rights in the Work (i) in all territories worldwide, (ii) for the + maximum duration provided by applicable law or treaty (including future time + extensions), (iii) in any current or future medium and for any number of + copies, and (iv) for any purpose whatsoever, including without limitation + commercial, advertising or promotional purposes (the "License"). The License + shall be deemed effective as of the date CC0 was applied by Affirmer to the + Work. Should any part of the License for any reason be judged legally + invalid or ineffective under applicable law, such partial invalidity or + ineffectiveness shall not invalidate the remainder of the License, and in + such case Affirmer hereby affirms that he or she will not (i) exercise any + of his or her remaining Copyright and Related Rights in the Work or (ii) + assert any associated claims and causes of action with respect to the Work, + in either case contrary to Affirmer's express Statement of Purpose. + + 4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, statutory + or otherwise, including without limitation warranties of title, + merchantability, fitness for a particular purpose, non infringement, or + the absence of latent or other defects, accuracy, or the present or + absence of errors, whether or not discoverable, all to the greatest + extent permissible under applicable law. + + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. + Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the Work. + + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to this + CC0 or use of the Work. + +------ + +** FLAC; version 1.4.2 -- https://xiph.org/flac/ +Copyright (C) 2001-2009 Josh Coalson +Copyright (C) 2011-2016 Xiph.Org Foundation +** Potrace; version 1.16 -- http://potrace.sourceforge.net/ +Copyright (C) 2001-2019 Peter Selinger. + +GNU GENERAL PUBLIC LICENSE + +Version 2, June 1991 + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. 
+ +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 , USA + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to share +and change it. By contrast, the GNU General Public License is intended to +guarantee your freedom to share and change free software--to make sure the +software is free for all its users. This General Public License applies to most +of the Free Software Foundation's software and to any other program whose +authors commit to using it. (Some other Free Software Foundation software is +covered by the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our +General Public Licenses are designed to make sure that you have the freedom to +distribute copies of free software (and charge for this service if you wish), +that you receive source code or can get it if you want it, that you can change +the software or use pieces of it in new free programs; and that you know you +can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny +you these rights or to ask you to surrender the rights. These restrictions +translate to certain responsibilities for you if you distribute copies of the +software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis or for +a fee, you must give the recipients all the rights that you have. You must make +sure that they, too, receive or can get the source code. And you must show them +these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) +offer you this license which gives you legal permission to copy, distribute +and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that +everyone understands that there is no warranty for this free software. If the +software is modified by someone else and passed on, we want its recipients to +know that what they have is not the original, so that any problems introduced +by others will not reflect on the original authors' reputations. + +Finally, any free program is threatened constantly by software patents. We wish +to avoid the danger that redistributors of a free program will individually +obtain patent licenses, in effect making the program proprietary. To prevent +this, we have made it clear that any patent must be licensed for everyone's +free use or not licensed at all. + +The precise terms and conditions for copying, distribution and modification +follow. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains a notice + placed by the copyright holder saying it may be distributed under the terms + of this General Public License. The "Program", below, refers to any such + program or work, and a "work based on the Program" means either the Program + or any derivative work under copyright law: that is to say, a work + containing the Program or a portion of it, either verbatim or with + modifications and/or translated into another language. (Hereinafter, + translation is included without limitation in the term "modification".) Each + licensee is addressed as "you". 
+ + Activities other than copying, distribution and modification are not covered + by this License; they are outside its scope. The act of running the Program + is not restricted, and the output from the Program is covered only if its + contents constitute a work based on the Program (independent of having been + made by running the Program). Whether that is true depends on what the + Program does. + + 1. You may copy and distribute verbatim copies of the Program's source code + as you receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice and + disclaimer of warranty; keep intact all the notices that refer to this + License and to the absence of any warranty; and give any other recipients of + the Program a copy of this License along with the Program. + + You may charge a fee for the physical act of transferring a copy, and you + may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion of it, + thus forming a work based on the Program, and copy and distribute such + modifications or work under the terms of Section 1 above, provided that you + also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices stating + that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in whole + or in part contains or is derived from the Program or any part thereof, + to be licensed as a whole at no charge to all third parties under the + terms of this License. + + c) If the modified program normally reads commands interactively when + run, you must cause it, when started running for such interactive use in + the most ordinary way, to print or display an announcement including an + appropriate copyright notice and a notice that there is no warranty (or + else, saying that you provide a warranty) and that users may redistribute + the program under these conditions, and telling the user how to view a + copy of this License. (Exception: if the Program itself is interactive + but does not normally print such an announcement, your work based on the + Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If identifiable + sections of that work are not derived from the Program, and can be + reasonably considered independent and separate works in themselves, then + this License, and its terms, do not apply to those sections when you + distribute them as separate works. But when you distribute the same sections + as part of a whole which is a work based on the Program, the distribution of + the whole must be on the terms of this License, whose permissions for other + licensees extend to the entire whole, and thus to each and every part + regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest your + rights to work written entirely by you; rather, the intent is to exercise + the right to control the distribution of derivative or collective works + based on the Program. + + In addition, mere aggregation of another work not based on the Program with + the Program (or with a work based on the Program) on a volume of a storage + or distribution medium does not bring the other work under the scope of this + License. + + 3. 
You may copy and distribute the Program (or a work based on it, under + Section 2) in object code or executable form under the terms of Sections 1 + and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable source + code, which must be distributed under the terms of Sections 1 and 2 above + on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, to + give any third party, for a charge no more than your cost of physically + performing source distribution, a complete machine-readable copy of the + corresponding source code, to be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to + distribute corresponding source code. (This alternative is allowed only + for noncommercial distribution and only if you received the program in + object code or executable form with such an offer, in accord with + Subsection b above.) + + The source code for a work means the preferred form of the work for making + modifications to it. For an executable work, complete source code means all + the source code for all modules it contains, plus any associated interface + definition files, plus the scripts used to control compilation and + installation of the executable. However, as a special exception, the source + code distributed need not include anything that is normally distributed (in + either source or binary form) with the major components (compiler, kernel, + and so on) of the operating system on which the executable runs, unless that + component itself accompanies the executable. + + If distribution of executable or object code is made by offering access to + copy from a designated place, then offering equivalent access to copy the + source code from the same place counts as distribution of the source code, + even though third parties are not compelled to copy the source along with + the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program except as + expressly provided under this License. Any attempt otherwise to copy, + modify, sublicense or distribute the Program is void, and will automatically + terminate your rights under this License. However, parties who have received + copies, or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 5. You are not required to accept this License, since you have not signed + it. However, nothing else grants you permission to modify or distribute the + Program or its derivative works. These actions are prohibited by law if you + do not accept this License. Therefore, by modifying or distributing the + Program (or any work based on the Program), you indicate your acceptance of + this License to do so, and all its terms and conditions for copying, + distributing or modifying the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the original + licensor to copy, distribute or modify the Program subject to these terms + and conditions. You may not impose any further restrictions on the + recipients' exercise of the rights granted herein. You are not responsible + for enforcing compliance by third parties to this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot distribute so + as to satisfy simultaneously your obligations under this License and any + other pertinent obligations, then as a consequence you may not distribute + the Program at all. For example, if a patent license would not permit + royalty-free redistribution of the Program by all those who receive copies + directly or indirectly through you, then the only way you could satisfy both + it and this License would be to refrain entirely from distribution of the + Program. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply and + the section as a whole is intended to apply in other circumstances. + + It is not the purpose of this section to induce you to infringe any patents + or other property right claims or to contest validity of any such claims; + this section has the sole purpose of protecting the integrity of the free + software distribution system, which is implemented by public license + practices. Many people have made generous contributions to the wide range of + software distributed through that system in reliance on consistent + application of that system; it is up to the author/donor to decide if he or + she is willing to distribute software through any other system and a + licensee cannot impose that choice. + + This section is intended to make thoroughly clear what is believed to be a + consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in certain + countries either by patents or by copyrighted interfaces, the original + copyright holder who places the Program under this License may add an + explicit geographical distribution limitation excluding those countries, so + that distribution is permitted only in or among countries not thus excluded. + In such case, this License incorporates the limitation as if written in the + body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions of + the General Public License from time to time. Such new versions will be + similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free programs + whose distribution conditions are different, write to the author to ask for + permission. For software which is copyrighted by the Free Software + Foundation, write to the Free Software Foundation; we sometimes make + exceptions for this. Our decision will be guided by the two goals of + preserving the free status of all derivatives of our free software and of + promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR + THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO + THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM + PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR + CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR + THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible +use to the public, the best way to achieve this is to make it free software +which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach +them to the start of each source file to most effectively convey the exclusion +of warranty; and each file should have at least the "copyright" line and a +pointer to where the full notice is found. + + + +Copyright (C) + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 51 Franklin +Street, Fifth Floor, Boston, MA 02110-1301 , USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it +starts in an interactive mode: + +Gnomovision version 69, Copyright (C) year name of author Gnomovision comes +with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, +and you are welcome to redistribute it under certain conditions; type `show c' +for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may be +called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your school, +if any, to sign a "copyright disclaimer" for the program, if necessary. 
Here is +a sample; alter the names: + +Yoyodyne, Inc., hereby disclaims all copyright interest in the program +`Gnomovision' (which makes passes at compilers) written by James Hacker. + +< signature of Ty Coon > , 1 April 1989 Ty Coon, President of Vice + +------ + +** FFTW; version 3.3.10 -- http://www.fftw.org/ +Copyright (c) 2003, 2007-14 Matteo Frigo +Copyright (c) 2003, 2007-14 Massachusetts Institute of Technology +** GMP; version 6.2.1 -- https://gmplib.org/ +Copyright 1996-2020 Free Software Foundation, Inc. +** OpenAL; version 1.21.1 -- http://openal-soft.org +Copyright (c) 2015, Archontis Politis +Copyright (c) 2019, Christopher Robinson + +GNU GENERAL PUBLIC LICENSE + +Version 2, June 1991 + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. + +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 , USA + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to share +and change it. By contrast, the GNU General Public License is intended to +guarantee your freedom to share and change free software--to make sure the +software is free for all its users. This General Public License applies to most +of the Free Software Foundation's software and to any other program whose +authors commit to using it. (Some other Free Software Foundation software is +covered by the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our +General Public Licenses are designed to make sure that you have the freedom to +distribute copies of free software (and charge for this service if you wish), +that you receive source code or can get it if you want it, that you can change +the software or use pieces of it in new free programs; and that you know you +can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny +you these rights or to ask you to surrender the rights. These restrictions +translate to certain responsibilities for you if you distribute copies of the +software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis or for +a fee, you must give the recipients all the rights that you have. You must make +sure that they, too, receive or can get the source code. And you must show them +these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) +offer you this license which gives you legal permission to copy, distribute +and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that +everyone understands that there is no warranty for this free software. If the +software is modified by someone else and passed on, we want its recipients to +know that what they have is not the original, so that any problems introduced +by others will not reflect on the original authors' reputations. + +Finally, any free program is threatened constantly by software patents. We wish +to avoid the danger that redistributors of a free program will individually +obtain patent licenses, in effect making the program proprietary. To prevent +this, we have made it clear that any patent must be licensed for everyone's +free use or not licensed at all. + +The precise terms and conditions for copying, distribution and modification +follow. 
+ +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains a notice + placed by the copyright holder saying it may be distributed under the terms + of this General Public License. The "Program", below, refers to any such + program or work, and a "work based on the Program" means either the Program + or any derivative work under copyright law: that is to say, a work + containing the Program or a portion of it, either verbatim or with + modifications and/or translated into another language. (Hereinafter, + translation is included without limitation in the term "modification".) Each + licensee is addressed as "you". + + Activities other than copying, distribution and modification are not covered + by this License; they are outside its scope. The act of running the Program + is not restricted, and the output from the Program is covered only if its + contents constitute a work based on the Program (independent of having been + made by running the Program). Whether that is true depends on what the + Program does. + + 1. You may copy and distribute verbatim copies of the Program's source code + as you receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice and + disclaimer of warranty; keep intact all the notices that refer to this + License and to the absence of any warranty; and give any other recipients of + the Program a copy of this License along with the Program. + + You may charge a fee for the physical act of transferring a copy, and you + may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion of it, + thus forming a work based on the Program, and copy and distribute such + modifications or work under the terms of Section 1 above, provided that you + also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices stating + that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in whole + or in part contains or is derived from the Program or any part thereof, + to be licensed as a whole at no charge to all third parties under the + terms of this License. + + c) If the modified program normally reads commands interactively when + run, you must cause it, when started running for such interactive use in + the most ordinary way, to print or display an announcement including an + appropriate copyright notice and a notice that there is no warranty (or + else, saying that you provide a warranty) and that users may redistribute + the program under these conditions, and telling the user how to view a + copy of this License. (Exception: if the Program itself is interactive + but does not normally print such an announcement, your work based on the + Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If identifiable + sections of that work are not derived from the Program, and can be + reasonably considered independent and separate works in themselves, then + this License, and its terms, do not apply to those sections when you + distribute them as separate works. 
But when you distribute the same sections + as part of a whole which is a work based on the Program, the distribution of + the whole must be on the terms of this License, whose permissions for other + licensees extend to the entire whole, and thus to each and every part + regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest your + rights to work written entirely by you; rather, the intent is to exercise + the right to control the distribution of derivative or collective works + based on the Program. + + In addition, mere aggregation of another work not based on the Program with + the Program (or with a work based on the Program) on a volume of a storage + or distribution medium does not bring the other work under the scope of this + License. + + 3. You may copy and distribute the Program (or a work based on it, under + Section 2) in object code or executable form under the terms of Sections 1 + and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable source + code, which must be distributed under the terms of Sections 1 and 2 above + on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, to + give any third party, for a charge no more than your cost of physically + performing source distribution, a complete machine-readable copy of the + corresponding source code, to be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to + distribute corresponding source code. (This alternative is allowed only + for noncommercial distribution and only if you received the program in + object code or executable form with such an offer, in accord with + Subsection b above.) + + The source code for a work means the preferred form of the work for making + modifications to it. For an executable work, complete source code means all + the source code for all modules it contains, plus any associated interface + definition files, plus the scripts used to control compilation and + installation of the executable. However, as a special exception, the source + code distributed need not include anything that is normally distributed (in + either source or binary form) with the major components (compiler, kernel, + and so on) of the operating system on which the executable runs, unless that + component itself accompanies the executable. + + If distribution of executable or object code is made by offering access to + copy from a designated place, then offering equivalent access to copy the + source code from the same place counts as distribution of the source code, + even though third parties are not compelled to copy the source along with + the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program except as + expressly provided under this License. Any attempt otherwise to copy, + modify, sublicense or distribute the Program is void, and will automatically + terminate your rights under this License. However, parties who have received + copies, or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 5. You are not required to accept this License, since you have not signed + it. However, nothing else grants you permission to modify or distribute the + Program or its derivative works. 
These actions are prohibited by law if you + do not accept this License. Therefore, by modifying or distributing the + Program (or any work based on the Program), you indicate your acceptance of + this License to do so, and all its terms and conditions for copying, + distributing or modifying the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the original + licensor to copy, distribute or modify the Program subject to these terms + and conditions. You may not impose any further restrictions on the + recipients' exercise of the rights granted herein. You are not responsible + for enforcing compliance by third parties to this License. + + 7. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot distribute so + as to satisfy simultaneously your obligations under this License and any + other pertinent obligations, then as a consequence you may not distribute + the Program at all. For example, if a patent license would not permit + royalty-free redistribution of the Program by all those who receive copies + directly or indirectly through you, then the only way you could satisfy both + it and this License would be to refrain entirely from distribution of the + Program. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply and + the section as a whole is intended to apply in other circumstances. + + It is not the purpose of this section to induce you to infringe any patents + or other property right claims or to contest validity of any such claims; + this section has the sole purpose of protecting the integrity of the free + software distribution system, which is implemented by public license + practices. Many people have made generous contributions to the wide range of + software distributed through that system in reliance on consistent + application of that system; it is up to the author/donor to decide if he or + she is willing to distribute software through any other system and a + licensee cannot impose that choice. + + This section is intended to make thoroughly clear what is believed to be a + consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in certain + countries either by patents or by copyrighted interfaces, the original + copyright holder who places the Program under this License may add an + explicit geographical distribution limitation excluding those countries, so + that distribution is permitted only in or among countries not thus excluded. + In such case, this License incorporates the limitation as if written in the + body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions of + the General Public License from time to time. Such new versions will be + similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. 
If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free programs + whose distribution conditions are different, write to the author to ask for + permission. For software which is copyrighted by the Free Software + Foundation, write to the Free Software Foundation; we sometimes make + exceptions for this. Our decision will be guided by the two goals of + preserving the free status of all derivatives of our free software and of + promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR + THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO + THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM + PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR + CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR + THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible +use to the public, the best way to achieve this is to make it free software +which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach +them to the start of each source file to most effectively convey the exclusion +of warranty; and each file should have at least the "copyright" line and a +pointer to where the full notice is found. + +< one line to give the program's name and an idea of what it does. > + +Copyright (C) < yyyy > < name of author > + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 51 Franklin +Street, Fifth Floor, Boston, MA 02110-1301 , USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it +starts in an interactive mode: + +Gnomovision version 69, Copyright (C) year name of author Gnomovision comes +with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, +and you are welcome to redistribute it under certain conditions; type `show c' +for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may be +called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your school, +if any, to sign a "copyright disclaimer" for the program, if necessary. Here is +a sample; alter the names: + +Yoyodyne, Inc., hereby disclaims all copyright interest in the program +`Gnomovision' (which makes passes at compilers) written by James Hacker. + +< signature of Ty Coon > , 1 April 1989 Ty Coon, President of Vice This General +Public License does not permit incorporating your program into proprietary +programs. If your program is a subroutine library, you may consider it more +useful to permit linking proprietary applications with the library. If this is +what you want to do, use the GNU Lesser General Public License instead of this +License. + +------ + +** libx264; version 35fe20d1ba49918e -- https://code.videolan.org/videolan/x264 +Copyright (C) 2003-2021 x264 project +** miniLZO; version 2.08 -- http://www.oberhumer.com/opensource/lzo/ +LZO and miniLZO are Copyright (C) 1996-2014 Markus Franz Xaver Oberhumer +All Rights Reserved. +** The FreeType Project; version 2.13.0 -- +https://sourceforge.net/projects/freetype +Copyright (C) 1996-2020 by David Turner, Robert Wilhelm, and Werner Lemberg. +** X Drag and Drop; version 2000-08-08 -- +https://freedesktop.org/wiki/Specifications/XDND/ +xdnd.c, xdnd.h - C program library for handling the Xdnd protocol +Copyright (C) 1996-2000 Paul Sheer +** Xvid Mpeg-4 Video Codec; version 1.3.7 -- https://www.xvid.com/ +Project initiators: +Christoph Lampert +Michael Militzer +Peter Ross +** Zstandard; version 1.6.0 -- https://github.com/facebook/zstd +Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + +GNU GENERAL PUBLIC LICENSE + +Version 2, June 1991 + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. + +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 , USA + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to share +and change it. By contrast, the GNU General Public License is intended to +guarantee your freedom to share and change free software--to make sure the +software is free for all its users. This General Public License applies to most +of the Free Software Foundation's software and to any other program whose +authors commit to using it. (Some other Free Software Foundation software is +covered by the GNU Lesser General Public License instead.) You can apply it to +your programs, too. 
+ +When we speak of free software, we are referring to freedom, not price. Our +General Public Licenses are designed to make sure that you have the freedom to +distribute copies of free software (and charge for this service if you wish), +that you receive source code or can get it if you want it, that you can change +the software or use pieces of it in new free programs; and that you know you +can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny +you these rights or to ask you to surrender the rights. These restrictions +translate to certain responsibilities for you if you distribute copies of the +software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis or for +a fee, you must give the recipients all the rights that you have. You must make +sure that they, too, receive or can get the source code. And you must show them +these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) +offer you this license which gives you legal permission to copy, distribute +and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that +everyone understands that there is no warranty for this free software. If the +software is modified by someone else and passed on, we want its recipients to +know that what they have is not the original, so that any problems introduced +by others will not reflect on the original authors' reputations. + +Finally, any free program is threatened constantly by software patents. We wish +to avoid the danger that redistributors of a free program will individually +obtain patent licenses, in effect making the program proprietary. To prevent +this, we have made it clear that any patent must be licensed for everyone's +free use or not licensed at all. + +The precise terms and conditions for copying, distribution and modification +follow. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains a notice + placed by the copyright holder saying it may be distributed under the terms + of this General Public License. The "Program", below, refers to any such + program or work, and a "work based on the Program" means either the Program + or any derivative work under copyright law: that is to say, a work + containing the Program or a portion of it, either verbatim or with + modifications and/or translated into another language. (Hereinafter, + translation is included without limitation in the term "modification".) Each + licensee is addressed as "you". + + Activities other than copying, distribution and modification are not covered + by this License; they are outside its scope. The act of running the Program + is not restricted, and the output from the Program is covered only if its + contents constitute a work based on the Program (independent of having been + made by running the Program). Whether that is true depends on what the + Program does. + + 1. You may copy and distribute verbatim copies of the Program's source code + as you receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice and + disclaimer of warranty; keep intact all the notices that refer to this + License and to the absence of any warranty; and give any other recipients of + the Program a copy of this License along with the Program. 
+ + You may charge a fee for the physical act of transferring a copy, and you + may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion of it, + thus forming a work based on the Program, and copy and distribute such + modifications or work under the terms of Section 1 above, provided that you + also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices stating + that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in whole + or in part contains or is derived from the Program or any part thereof, + to be licensed as a whole at no charge to all third parties under the + terms of this License. + + c) If the modified program normally reads commands interactively when + run, you must cause it, when started running for such interactive use in + the most ordinary way, to print or display an announcement including an + appropriate copyright notice and a notice that there is no warranty (or + else, saying that you provide a warranty) and that users may redistribute + the program under these conditions, and telling the user how to view a + copy of this License. (Exception: if the Program itself is interactive + but does not normally print such an announcement, your work based on the + Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If identifiable + sections of that work are not derived from the Program, and can be + reasonably considered independent and separate works in themselves, then + this License, and its terms, do not apply to those sections when you + distribute them as separate works. But when you distribute the same sections + as part of a whole which is a work based on the Program, the distribution of + the whole must be on the terms of this License, whose permissions for other + licensees extend to the entire whole, and thus to each and every part + regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest your + rights to work written entirely by you; rather, the intent is to exercise + the right to control the distribution of derivative or collective works + based on the Program. + + In addition, mere aggregation of another work not based on the Program with + the Program (or with a work based on the Program) on a volume of a storage + or distribution medium does not bring the other work under the scope of this + License. + + 3. You may copy and distribute the Program (or a work based on it, under + Section 2) in object code or executable form under the terms of Sections 1 + and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable source + code, which must be distributed under the terms of Sections 1 and 2 above + on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, to + give any third party, for a charge no more than your cost of physically + performing source distribution, a complete machine-readable copy of the + corresponding source code, to be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to + distribute corresponding source code. 
(This alternative is allowed only + for noncommercial distribution and only if you received the program in + object code or executable form with such an offer, in accord with + Subsection b above.) + + The source code for a work means the preferred form of the work for making + modifications to it. For an executable work, complete source code means all + the source code for all modules it contains, plus any associated interface + definition files, plus the scripts used to control compilation and + installation of the executable. However, as a special exception, the source + code distributed need not include anything that is normally distributed (in + either source or binary form) with the major components (compiler, kernel, + and so on) of the operating system on which the executable runs, unless that + component itself accompanies the executable. + + If distribution of executable or object code is made by offering access to + copy from a designated place, then offering equivalent access to copy the + source code from the same place counts as distribution of the source code, + even though third parties are not compelled to copy the source along with + the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program except as + expressly provided under this License. Any attempt otherwise to copy, + modify, sublicense or distribute the Program is void, and will automatically + terminate your rights under this License. However, parties who have received + copies, or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 5. You are not required to accept this License, since you have not signed + it. However, nothing else grants you permission to modify or distribute the + Program or its derivative works. These actions are prohibited by law if you + do not accept this License. Therefore, by modifying or distributing the + Program (or any work based on the Program), you indicate your acceptance of + this License to do so, and all its terms and conditions for copying, + distributing or modifying the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the original + licensor to copy, distribute or modify the Program subject to these terms + and conditions. You may not impose any further restrictions on the + recipients' exercise of the rights granted herein. You are not responsible + for enforcing compliance by third parties to this License. + + 7. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot distribute so + as to satisfy simultaneously your obligations under this License and any + other pertinent obligations, then as a consequence you may not distribute + the Program at all. For example, if a patent license would not permit + royalty-free redistribution of the Program by all those who receive copies + directly or indirectly through you, then the only way you could satisfy both + it and this License would be to refrain entirely from distribution of the + Program. 
+ + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply and + the section as a whole is intended to apply in other circumstances. + + It is not the purpose of this section to induce you to infringe any patents + or other property right claims or to contest validity of any such claims; + this section has the sole purpose of protecting the integrity of the free + software distribution system, which is implemented by public license + practices. Many people have made generous contributions to the wide range of + software distributed through that system in reliance on consistent + application of that system; it is up to the author/donor to decide if he or + she is willing to distribute software through any other system and a + licensee cannot impose that choice. + + This section is intended to make thoroughly clear what is believed to be a + consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in certain + countries either by patents or by copyrighted interfaces, the original + copyright holder who places the Program under this License may add an + explicit geographical distribution limitation excluding those countries, so + that distribution is permitted only in or among countries not thus excluded. + In such case, this License incorporates the limitation as if written in the + body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions of + the General Public License from time to time. Such new versions will be + similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free programs + whose distribution conditions are different, write to the author to ask for + permission. For software which is copyrighted by the Free Software + Foundation, write to the Free Software Foundation; we sometimes make + exceptions for this. Our decision will be guided by the two goals of + preserving the free status of all derivatives of our free software and of + promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR + THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO + THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM + PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR + CORRECTION. + + 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR + THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible +use to the public, the best way to achieve this is to make it free software +which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach +them to the start of each source file to most effectively convey the exclusion +of warranty; and each file should have at least the "copyright" line and a +pointer to where the full notice is found. + + + +Copyright (C) + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 51 Franklin +Street, Fifth Floor, Boston, MA 02110-1301 , USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it +starts in an interactive mode: + +Gnomovision version 69, Copyright (C) year name of author Gnomovision comes +with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, +and you are welcome to redistribute it under certain conditions; type `show c' +for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may be +called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your school, +if any, to sign a "copyright disclaimer" for the program, if necessary. Here is +a sample; alter the names: + +Yoyodyne, Inc., hereby disclaims all copyright interest in the program +`Gnomovision' (which makes passes at compilers) written by James Hacker. + +< signature of Ty Coon > , 1 April 1989 Ty Coon, President of Vice This General +Public License does not permit incorporating your program into proprietary +programs. If your program is a subroutine library, you may consider it more +useful to permit linking proprietary applications with the library. If this is +what you want to do, use the GNU Lesser General Public License instead of this +License. 
+ +------ + +** Eigen, template library for linear algebra: matrices, vectors, numerical +solvers, and related algorithms; version 3.2.7 -- +http://eigen.tuxfamily.org/index.php?title=Main_Page +Copyright (C) 2008-2010 Gael Guennebaud , Copyright +(C) 2006-2008 Benoit Jacob + +GNU GENERAL PUBLIC LICENSE + +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +Preamble + +The GNU General Public License is a free, copyleft license for software and +other kinds of works. + +The licenses for most software and other practical works are designed to take +away your freedom to share and change the works. By contrast, the GNU General +Public License is intended to guarantee your freedom to share and change all +versions of a program--to make sure it remains free software for all its users. +We, the Free Software Foundation, use the GNU General Public License for most +of our software; it applies also to any other work released this way by its +authors. You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our +General Public Licenses are designed to make sure that you have the freedom to +distribute copies of free software (and charge for them if you wish), that you +receive source code or can get it if you want it, that you can change the +software or use pieces of it in new free programs, and that you know you can do +these things. + +To protect your rights, we need to prevent others from denying you these rights +or asking you to surrender the rights. Therefore, you have certain +responsibilities if you distribute copies of the software, or if you modify it: +responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for +a fee, you must pass on to the recipients the same freedoms that you received. +You must make sure that they, too, receive or can get the source code. And you +must show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert +copyright on the software, and (2) offer you this License giving you legal +permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that +there is no warranty for this free software. For both users' and authors' sake, +the GPL requires that modified versions be marked as changed, so that their +problems will not be attributed erroneously to authors of previous versions. + +Some devices are designed to deny users access to install or run modified +versions of the software inside them, although the manufacturer can do so. This +is fundamentally incompatible with the aim of protecting users' freedom to +change the software. The systematic pattern of such abuse occurs in the area of +products for individuals to use, which is precisely where it is most +unacceptable. Therefore, we have designed this version of the GPL to prohibit +the practice for those products. If such problems arise substantially in other +domains, we stand ready to extend this provision to those domains in future +versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. 
States +should not allow patents to restrict development and use of software on +general-purpose computers, but in those that do, we wish to avoid the special +danger that patents applied to a free program could make it effectively +proprietary. To prevent this, the GPL assures that patents cannot be used to +render the program non-free. + +The precise terms and conditions for copying, distribution and modification +follow. + +TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this License. + Each licensee is addressed as "you". "Licensees" and "recipients" may be + individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work in a + fashion requiring copyright permission, other than the making of an exact + copy. The resulting work is called a "modified version" of the earlier work + or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based on the + Program. + + To "propagate" a work means to do anything with it that, without permission, + would make you directly or secondarily liable for infringement under + applicable copyright law, except executing it on a computer or modifying a + private copy. Propagation includes copying, distribution (with or without + modification), making available to the public, and in some countries other + activities as well. + + To "convey" a work means any kind of propagation that enables other parties + to make or receive copies. Mere interaction with a user through a computer + network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties are + provided), that licensees may convey the work under this License, and how to + view a copy of this License. If the interface presents a list of user + commands or options, such as a menu, a prominent item in the list meets this + criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work for making + modifications to it. "Object code" means any non-source form of a work. + + A "Standard Interface" means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other than + the work as a whole, that (a) is included in the normal form of packaging a + Major Component, but which is not part of that Major Component, and (b) + serves only to enable use of the work with that Major Component, or to + implement a Standard Interface for which an implementation is available to + the public in source code form. A "Major Component", in this context, means + a major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or a + compiler used to produce the work, or an object code interpreter used to run + it. 
+ + The "Corresponding Source" for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control those + activities. However, it does not include the work's System Libraries, or + general-purpose tools or generally available free programs which are used + unmodified in performing those activities but which are not part of the + work. For example, Corresponding Source includes interface definition files + associated with source files for the work, and the source code for shared + libraries and dynamically linked subprograms that the work is specifically + designed to require, such as by intimate data communication or control flow + between those subprograms and other parts of the work. + + The Corresponding Source need not include anything that users can regenerate + automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of copyright + on the Program, and are irrevocable provided the stated conditions are met. + This License explicitly affirms your unlimited permission to run the + unmodified Program. The output from running a covered work is covered by + this License only if the output, given its content, constitutes a covered + work. This License acknowledges your rights of fair use or other equivalent, + as provided by copyright law. + + You may make, run and propagate covered works that you do not convey, + without conditions so long as your license otherwise remains in force. You + may convey covered works to others for the sole purpose of having them make + modifications exclusively for you, or provide you with facilities for + running those works, provided that you comply with the terms of this License + in conveying all material for which you do not control copyright. Those thus + making or running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them from + making any copies of your copyrighted material outside their relationship + with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological measure + under any applicable law fulfilling obligations under article 11 of the WIPO + copyright treaty adopted on 20 December 1996, or similar laws prohibiting or + restricting circumvention of such measures. + + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the covered + work, and you disclaim any intention to limit operation or modification of + the work as a means of enforcing, against the work's users, your or third + parties' legal rights to forbid circumvention of technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you receive + it, in any medium, provided that you conspicuously and appropriately publish + on each copy an appropriate copyright notice; keep intact all notices + stating that this License and any non-permissive terms added in accord with + section 7 apply to the code; keep intact all notices of the absence of any + warranty; and give all recipients a copy of this License along with the + Program. + + You may charge any price or no price for each copy that you convey, and you + may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to produce + it from the Program, in the form of source code under the terms of section + 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to "keep intact all + notices". + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the whole + of the work, and all its parts, regardless of how they are packaged. This + License gives no permission to license the work in any other way, but it + does not invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work need + not make them do so. + + A compilation of a covered work with other separate and independent works, + which are not by their nature extensions of the covered work, and which are + not combined with it such as to form a larger program, in or on a volume of + a storage or distribution medium, is called an "aggregate" if the + compilation and its resulting copyright are not used to limit the access or + legal rights of the compilation's users beyond what the individual works + permit. Inclusion of a covered work in an aggregate does not cause this + License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily used + for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you offer + spare parts or customer support for that product model, to give anyone + who possesses the object code either (1) a copy of the Corresponding + Source for all the software in the product that is covered by this + License, on a durable physical medium customarily used for software + interchange, for a price no more than your reasonable cost of physically + performing this conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the written + offer to provide the Corresponding Source. This alternative is allowed + only occasionally and noncommercially, and only if you received the + object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no further + charge. You need not require recipients to copy the Corresponding Source + along with the object code. If the place to copy the object code is a + network server, the Corresponding Source may be on a different server + (operated by you or a third party) that supports equivalent copying + facilities, provided you maintain clear directions next to the object + code saying where to find the Corresponding Source. Regardless of what + server hosts the Corresponding Source, you remain obligated to ensure + that it is available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of the + work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded from + the Corresponding Source as a System Library, need not be included in + conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any + tangible personal property which is normally used for personal, family, or + household purposes, or (2) anything designed or sold for incorporation into + a dwelling. In determining whether a product is a consumer product, doubtful + cases shall be resolved in favor of coverage. For a particular product + received by a particular user, "normally used" refers to a typical or common + use of that class of product, regardless of the status of the particular + user or of the way in which the particular user actually uses, or expects or + is expected to use, the product. A product is a consumer product regardless + of whether the product has substantial commercial, industrial or + non-consumer uses, unless such uses represent the only significant mode of + use of the product. + + "Installation Information" for a User Product means any methods, procedures, + authorization keys, or other information required to install and execute + modified versions of a covered work in that User Product from a modified + version of its Corresponding Source. The information must suffice to ensure + that the continued functioning of the modified object code is in no case + prevented or interfered with solely because modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part of + a transaction in which the right of possession and use of the User Product + is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the Installation + Information. But this requirement does not apply if neither you nor any + third party retains the ability to install modified object code on the User + Product (for example, the work has been installed in ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates for + a work that has been modified or installed by the recipient, or for the User + Product in which it has been modified or installed. Access to a network may + be denied when the modification itself materially and adversely affects the + operation of the network or violates the rules and protocols for + communication across the network. + + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this License + by making exceptions from one or more of its conditions. Additional + permissions that are applicable to the entire Program shall be treated as + though they were included in this License, to the extent that they are valid + under applicable law. If additional permissions apply only to part of the + Program, that part may be used separately under those permissions, but the + entire Program remains governed by this License without regard to the + additional permissions. + + When you convey a copy of a covered work, you may at your option remove any + additional permissions from that copy, or from any part of it. (Additional + permissions may be written to require their own removal in certain cases + when you modify the work.) You may place additional permissions on material, + added by you to a covered work, for which you have or can give appropriate + copyright permission. 
+ + Notwithstanding any other provision of this License, for material you add to + a covered work, you may (if authorized by the copyright holders of that + material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the terms + of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or author + attributions in that material or in the Appropriate Legal Notices + displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in reasonable + ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material by + anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any liability + that these contractual assumptions directly impose on those licensors and + authors. + + All other non-permissive additional terms are considered "further + restrictions" within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you may + add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the form of + a separately written license, or stated as exceptions; the above + requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly provided + under this License. Any attempt otherwise to propagate or modify it is void, + and will automatically terminate your rights under this License (including + any patent licenses granted under the third paragraph of section 11). + + However, if you cease all violation of this License, then your license from + a particular copyright holder is reinstated (a) provisionally, unless and + until the copyright holder explicitly and finally terminates your license, + and (b) permanently, if the copyright holder fails to notify you of the + violation by some reasonable means prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and you + cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under this + License. 
If your rights have been terminated and not permanently reinstated, + you do not qualify to receive new licenses for the same material under + section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a copy + likewise does not require acceptance. However, nothing other than this + License grants you permission to propagate or modify any covered work. These + actions infringe copyright if you do not accept this License. Therefore, by + modifying or propagating a covered work, you indicate your acceptance of + this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives a + license from the original licensors, to run, modify and propagate that work, + subject to this License. You are not responsible for enforcing compliance by + third parties with this License. + + An "entity transaction" is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work the + party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can get + it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted under + this License, and you may not initiate litigation (including a cross-claim + or counterclaim in a lawsuit) alleging that any patent claim is infringed by + making, using, selling, offering for sale, or importing the Program or any + portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this License + of the Program or a work on which the Program is based. The work thus + licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this License, + of making, using, or selling its contributor version, but do not include + claims that would be infringed only as a consequence of further modification + of the contributor version. For purposes of this definition, "control" + includes the right to grant patent sublicenses in a manner consistent with + the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free patent + license under the contributor's essential patent claims, to make, use, sell, + offer for sale, import and otherwise run, modify and propagate the contents + of its contributor version. + + In the following three paragraphs, a "patent license" is any express + agreement or commitment, however denominated, not to enforce a patent (such + as an express permission to practice a patent or covenant not to sue for + patent infringement). 
To "grant" such a patent license to a party means to + make such an agreement or commitment not to enforce a patent against the + party. + + If you convey a covered work, knowingly relying on a patent license, and the + Corresponding Source of the work is not available for anyone to copy, free + of charge and under the terms of this License, through a publicly available + network server or other readily accessible means, then you must either (1) + cause the Corresponding Source to be so available, or (2) arrange to deprive + yourself of the benefit of the patent license for this particular work, or + (3) arrange, in a manner consistent with the requirements of this License, + to extend the patent license to downstream recipients. "Knowingly relying" + means you have actual knowledge that, but for the patent license, your + conveying the covered work in a country, or your recipient's use of the + covered work in a country, would infringe one or more identifiable patents + in that country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or arrangement, + you convey, or propagate by procuring conveyance of, a covered work, and + grant a patent license to some of the parties receiving the covered work + authorizing them to use, propagate, modify or convey a specific copy of the + covered work, then the patent license you grant is automatically extended to + all recipients of the covered work and works based on it. + + A patent license is "discriminatory" if it does not include within the scope + of its coverage, prohibits the exercise of, or is conditioned on the + non-exercise of one or more of the rights that are specifically granted + under this License. You may not convey a covered work if you are a party to + an arrangement with a third party that is in the business of distributing + software, under which you make payment to the third party based on the + extent of your activity of conveying the work, and under which the third + party grants, to any of the parties who would receive the covered work from + you, a discriminatory patent license (a) in connection with copies of the + covered work conveyed by you (or copies made from those copies), or (b) + primarily for and in connection with specific products or compilations that + contain the covered work, unless you entered into that arrangement, or that + patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot convey a + covered work so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you may + not convey it at all. For example, if you agree to terms that obligate you + to collect a royalty for further conveying from those to whom you convey the + Program, the only way you could satisfy both those terms and this License + would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have permission to + link or combine any covered work with a work licensed under version 3 of the + GNU Affero General Public License into a single combined work, and to convey + the resulting work. The terms of this License will continue to apply to the + part which is the covered work, but the special requirements of the GNU + Affero General Public License, section 13, concerning interaction through a + network will apply to the combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of the + GNU General Public License from time to time. Such new versions will be + similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies that a certain numbered version of the GNU General Public License + "or any later version" applies to it, you have the option of following the + terms and conditions either of that numbered version or of any later version + published by the Free Software Foundation. If the Program does not specify a + version number of the GNU General Public License, you may choose any version + ever published by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future versions of + the GNU General Public License can be used, that proxy's public statement of + acceptance of a version permanently authorizes you to choose that version + for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE + LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR + OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, + EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE + ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. + SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY + SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL + ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE + PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY + GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE + OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA + OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD + PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), + EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF + SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. 
END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible +use to the public, the best way to achieve this is to make it free software +which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach +them to the start of each source file to most effectively state the exclusion +of warranty; and each file should have at least the "copyright" line and a +pointer to where the full notice is found. + + + +Copyright (C) + +This program is free software: you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation, either version 3 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + +If the program does terminal interaction, make it output a short notice like +this when it starts in an interactive mode: + + Copyright (C) + +This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + +This is free software, and you are welcome to redistribute it under certain +conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands might +be different; for a GUI interface, you would use an "about box". + +You should also get your employer (if you work as a programmer) or school, if +any, to sign a "copyright disclaimer" for the program, if necessary. For more +information on this, and how to apply and follow the GNU GPL, see . + +The GNU General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may consider +it more useful to permit linking proprietary applications with the library. If +this is what you want to do, use the GNU Lesser General Public License instead +of this License. But first, please read . + +------ + +** Fribidi ; version 1.0.12 -- https://github.com/fribidi/fribidi +Behdad Esfahbod +# +# Behdad Esfahbod maintained the entire 0.19 series. He designed, and +# implemented most of what is in FriBidi today. +# + +Dov Grobgeld +# +# Dov Grobgeld originally wrote FriBidi. The 0.1.* releases were all done +# by him. After the a long time of not being involved, Dov received +# back the maintenance of the package in time for the 1.0 release. +# He did the entire algorithmic work to support the changes made +# to the Unicode algorithm in the Unicode 6.3 standard. +# + +Roozbeh Pournader +# +# Roozbeh Pournader hasn't contributed much code to FriBidi personally; but +# has maintained, promoted, and supported the project for a while. He has +# helped with making GNU FriBidi standards compliant, and has sometimes +# lobbied with the Unicode Consortium when needed. Roozbeh was supposed to +# be a co-maintainer of GNU FriBidi, but he's not doing that yet. +# + +Khaled Hosny +# +# Khaled Hosny has done lots of cleanup and autoconfig work. 
+ +# Note: Other people have contributed significant amounts of code, but +# usually the code has faded out because of restructuring and redesigning +# things around GNU FriBidi. As an example, the FriBidiEnv patch by Omer +# Zak, made itself into FriBidi CVS for a couple of years, but was finally +# implemented in a better way by Behdad. +# +# Note: GNU getopt is distributed with and used in GNU FriBidi under bin/, but +# is not part of GNU FriBidi. +# +# Note: Parts of the Unicode Character Database are distributed with and used +# in GNU FriBidi under gen.tab/unidata/, but are not part of GNU FriBidi. +# + + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. 
Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. 
This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. 
Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. 
+ + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. 
+ + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + +------ + +** FFmpeg; version 6.0 -- http://ffmpeg.org/ +Copyright: The FFmpeg contributors +https://github.com/FFmpeg/FFmpeg/blob/master/CREDITS +** Libsndfile; version 1.1.0 -- http://libsndfile.github.io/libsndfile/ +Copyright (C) 2011-2016 Erik de Castro Lopo + +GNU LESSER GENERAL PUBLIC LICENSE + +Version 2.1, February 1999 + +Copyright (C) 1991, 1999 Free Software Foundation, Inc. 
+ +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts as the +successor of the GNU Library Public License, version 2, hence the version +number 2.1.] + +Preamble + +The licenses for most software are designed to take away your freedom to share +and change it. By contrast, the GNU General Public Licenses are intended to +guarantee your freedom to share and change free software--to make sure the +software is free for all its users. + +This license, the Lesser General Public License, applies to some specially +designated software packages--typically libraries--of the Free Software +Foundation and other authors who decide to use it. You can use it too, but we +suggest you first think carefully about whether this license or the ordinary +General Public License is the better strategy to use in any particular case, +based on the explanations below. + +When we speak of free software, we are referring to freedom of use, not price. +Our General Public Licenses are designed to make sure that you have the freedom +to distribute copies of free software (and charge for this service if you +wish); that you receive source code or can get it if you want it; that you can +change the software and use pieces of it in new free programs; and that you are +informed that you can do these things. + +To protect your rights, we need to make restrictions that forbid distributors +to deny you these rights or to ask you to surrender these rights. These +restrictions translate to certain responsibilities for you if you distribute +copies of the library or if you modify it. + +For example, if you distribute copies of the library, whether gratis or for a +fee, you must give the recipients all the rights that we gave you. You must +make sure that they, too, receive or can get the source code. If you link other +code with the library, you must provide complete object files to the +recipients, so that they can relink them with the library after making changes +to the library and recompiling it. And you must show them these terms so they +know their rights. + +We protect your rights with a two-step method: (1) we copyright the library, +and (2) we offer you this license, which gives you legal permission to copy, +distribute and/or modify the library. + +To protect each distributor, we want to make it very clear that there is no +warranty for the free library. Also, if the library is modified by someone else +and passed on, the recipients should know that what they have is not the +original version, so that the original author's reputation will not be affected +by problems that might be introduced by others. + +Finally, software patents pose a constant threat to the existence of any free +program. We wish to make sure that a company cannot effectively restrict the +users of a free program by obtaining a restrictive license from a patent +holder. Therefore, we insist that any patent license obtained for a version of +the library must be consistent with the full freedom of use specified in this +license. + +Most GNU software, including some libraries, is covered by the ordinary GNU +General Public License. This license, the GNU Lesser General Public License, +applies to certain designated libraries, and is quite different from the +ordinary General Public License. 
We use this license for certain libraries in +order to permit linking those libraries into non-free programs. + +When a program is linked with a library, whether statically or using a shared +library, the combination of the two is legally speaking a combined work, a +derivative of the original library. The ordinary General Public License +therefore permits such linking only if the entire combination fits its criteria +of freedom. The Lesser General Public License permits more lax criteria for +linking other code with the library. + +We call this license the "Lesser" General Public License because it does Less +to protect the user's freedom than the ordinary General Public License. It also +provides other free software developers Less of an advantage over competing +non-free programs. These disadvantages are the reason we use the ordinary +General Public License for many libraries. However, the Lesser license provides +advantages in certain special circumstances. + +For example, on rare occasions, there may be a special need to encourage the +widest possible use of a certain library, so that it becomes a de-facto +standard. To achieve this, non-free programs must be allowed to use the +library. A more frequent case is that a free library does the same job as +widely used non-free libraries. In this case, there is little to gain by +limiting the free library to free software only, so we use the Lesser General +Public License. + +In other cases, permission to use a particular library in non-free programs +enables a greater number of people to use a large body of free software. For +example, permission to use the GNU C Library in non-free programs enables many +more people to use the whole GNU operating system, as well as its variant, the +GNU/Linux operating system. + +Although the Lesser General Public License is Less protective of the users' +freedom, it does ensure that the user of a program that is linked with the +Library has the freedom and the wherewithal to run that program using a +modified version of the Library. + +The precise terms and conditions for copying, distribution and modification +follow. Pay close attention to the difference between a "work based on the +library" and a "work that uses the library". The former contains code derived +from the library, whereas the latter must be combined with the library in order +to run. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other program + which contains a notice placed by the copyright holder or other authorized + party saying it may be distributed under the terms of this Lesser General + Public License (also called "this License"). Each licensee is addressed as + "you". + + A "library" means a collection of software functions and/or data prepared so + as to be conveniently linked with application programs (which use some of + those functions and data) to form executables. + + The "Library", below, refers to any such software library or work which has + been distributed under these terms. A "work based on the Library" means + either the Library or any derivative work under copyright law: that is to + say, a work containing the Library or a portion of it, either verbatim or + with modifications and/or translated straightforwardly into another + language. (Hereinafter, translation is included without limitation in the + term "modification".) + + "Source code" for a work means the preferred form of the work for making + modifications to it. 
For a library, complete source code means all the + source code for all modules it contains, plus any associated interface + definition files, plus the scripts used to control compilation and + installation of the library. + + Activities other than copying, distribution and modification are not covered + by this License; they are outside its scope. The act of running a program + using the Library is not restricted, and output from such a program is + covered only if its contents constitute a work based on the Library + (independent of the use of the Library in a tool for writing it). Whether + that is true depends on what the Library does and what the program that uses + the Library does. + + 1. You may copy and distribute verbatim copies of the Library's complete + source code as you receive it, in any medium, provided that you + conspicuously and appropriately publish on each copy an appropriate + copyright notice and disclaimer of warranty; keep intact all the notices + that refer to this License and to the absence of any warranty; and + distribute a copy of this License along with the Library. + + You may charge a fee for the physical act of transferring a copy, and you + may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Library or any portion of it, + thus forming a work based on the Library, and copy and distribute such + modifications or work under the terms of Section 1 above, provided that you + also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices stating + that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no charge to + all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a table + of data to be supplied by an application program that uses the facility, + other than as an argument passed when the facility is invoked, then you + must make a good faith effort to ensure that, in the event an application + does not supply such function or table, the facility still operates, and + performs whatever part of its purpose remains meaningful. + + (For example, a function in a library to compute square roots has a purpose + that is entirely well-defined independent of the application. Therefore, + Subsection 2d requires that any application-supplied function or table used + by this function must be optional: if the application does not supply it, + the square root function must still compute square roots.) + + These requirements apply to the modified work as a whole. If identifiable + sections of that work are not derived from the Library, and can be + reasonably considered independent and separate works in themselves, then + this License, and its terms, do not apply to those sections when you + distribute them as separate works. But when you distribute the same sections + as part of a whole which is a work based on the Library, the distribution of + the whole must be on the terms of this License, whose permissions for other + licensees extend to the entire whole, and thus to each and every part + regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest your + rights to work written entirely by you; rather, the intent is to exercise + the right to control the distribution of derivative or collective works + based on the Library. 
+ + In addition, mere aggregation of another work not based on the Library with + the Library (or with a work based on the Library) on a volume of a storage + or distribution medium does not bring the other work under the scope of this + License. + + 3. You may opt to apply the terms of the ordinary GNU General Public License + instead of this License to a given copy of the Library. To do this, you must + alter all the notices that refer to this License, so that they refer to the + ordinary GNU General Public License, version 2, instead of to this License. + (If a newer version than version 2 of the ordinary GNU General Public + License has appeared, then you can specify that version instead if you + wish.) Do not make any other change in these notices. + + Once this change is made in a given copy, it is irreversible for that copy, + so the ordinary GNU General Public License applies to all subsequent copies + and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of the Library + into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or derivative of + it, under Section 2) in object code or executable form under the terms of + Sections 1 and 2 above provided that you accompany it with the complete + corresponding machine-readable source code, which must be distributed under + the terms of Sections 1 and 2 above on a medium customarily used for + software interchange. + + If distribution of object code is made by offering access to copy from a + designated place, then offering equivalent access to copy the source code + from the same place satisfies the requirement to distribute the source code, + even though third parties are not compelled to copy the source along with + the object code. + + 5. A program that contains no derivative of any portion of the Library, but + is designed to work with the Library by being compiled or linked with it, is + called a "work that uses the Library". Such a work, in isolation, is not a + derivative work of the Library, and therefore falls outside the scope of + this License. + + However, linking a "work that uses the Library" with the Library creates an + executable that is a derivative of the Library (because it contains portions + of the Library), rather than a "work that uses the library". The executable + is therefore covered by this License. Section 6 states terms for + distribution of such executables. + + When a "work that uses the Library" uses material from a header file that is + part of the Library, the object code for the work may be a derivative work + of the Library even though the source code is not. Whether this is true is + especially significant if the work can be linked without the Library, or if + the work is itself a library. The threshold for this to be true is not + precisely defined by law. + + If such an object file uses only numerical parameters, data structure + layouts and accessors, and small macros and small inline functions (ten + lines or less in length), then the use of the object file is unrestricted, + regardless of whether it is legally a derivative work. (Executables + containing this object code plus portions of the Library will still fall + under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may distribute + the object code for the work under the terms of Section 6. 
Any executables + containing that work also fall under Section 6, whether or not they are + linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or link a + "work that uses the Library" with the Library to produce a work containing + portions of the Library, and distribute that work under terms of your + choice, provided that the terms permit modification of the work for the + customer's own use and reverse engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the Library + is used in it and that the Library and its use are covered by this License. + You must supply a copy of this License. If the work during execution + displays copyright notices, you must include the copyright notice for the + Library among them, as well as a reference directing the user to the copy of + this License. Also, you must do one of these things: + + a) Accompany the work with the complete corresponding machine-readable + source code for the Library including whatever changes were used in the + work (which must be distributed under Sections 1 and 2 above); and, if + the work is an executable linked with the Library, with the complete + machine-readable "work that uses the Library", as object code and/or + source code, so that the user can modify the Library and then relink to + produce a modified executable containing the modified Library. (It is + understood that the user who changes the contents of definitions files in + the Library will not necessarily be able to recompile the application to + use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the Library. + A suitable mechanism is one that (1) uses at run time a copy of the + library already present on the user's computer system, rather than + copying library functions into the executable, and (2) will operate + properly with a modified version of the library, if the user installs + one, as long as the modified version is interface-compatible with the + version that the work was made with. + + c) Accompany the work with a written offer, valid for at least three + years, to give the same user the materials specified in Subsection 6a, + above, for a charge no more than the cost of performing this + distribution. + + d) If distribution of the work is made by offering access to copy from a + designated place, offer equivalent access to copy the above specified + materials from the same place. + + e) Verify that the user has already received a copy of these materials or + that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the Library" + must include any data and utility programs needed for reproducing the + executable from it. However, as a special exception, the materials to be + distributed need not include anything that is normally distributed (in + either source or binary form) with the major components (compiler, kernel, + and so on) of the operating system on which the executable runs, unless that + component itself accompanies the executable. + + It may happen that this requirement contradicts the license restrictions of + other proprietary libraries that do not normally accompany the operating + system. Such a contradiction means you cannot use both them and the Library + together in an executable that you distribute. + + 7. 
You may place library facilities that are a work based on the Library + side-by-side in a single library together with other library facilities not + covered by this License, and distribute such a combined library, provided + that the separate distribution of the work based on the Library and of the + other library facilities is otherwise permitted, and provided that you do + these two things: + + a) Accompany the combined library with a copy of the same work based on + the Library, uncombined with any other library facilities. This must be + distributed under the terms of the Sections above. + + b) Give prominent notice with the combined library of the fact that part + of it is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute the + Library except as expressly provided under this License. Any attempt + otherwise to copy, modify, sublicense, link with, or distribute the Library + is void, and will automatically terminate your rights under this License. + However, parties who have received copies, or rights, from you under this + License will not have their licenses terminated so long as such parties + remain in full compliance. + + 9. You are not required to accept this License, since you have not signed + it. However, nothing else grants you permission to modify or distribute the + Library or its derivative works. These actions are prohibited by law if you + do not accept this License. Therefore, by modifying or distributing the + Library (or any work based on the Library), you indicate your acceptance of + this License to do so, and all its terms and conditions for copying, + distributing or modifying the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the + Library), the recipient automatically receives a license from the original + licensor to copy, distribute, link with or modify the Library subject to + these terms and conditions. You may not impose any further restrictions on + the recipients' exercise of the rights granted herein. You are not + responsible for enforcing compliance by third parties with this License. + + 11. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot distribute so + as to satisfy simultaneously your obligations under this License and any + other pertinent obligations, then as a consequence you may not distribute + the Library at all. For example, if a patent license would not permit + royalty-free redistribution of the Library by all those who receive copies + directly or indirectly through you, then the only way you could satisfy both + it and this License would be to refrain entirely from distribution of the + Library. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply, + and the section as a whole is intended to apply in other circumstances. 
+ + It is not the purpose of this section to induce you to infringe any patents + or other property right claims or to contest validity of any such claims; + this section has the sole purpose of protecting the integrity of the free + software distribution system which is implemented by public license + practices. Many people have made generous contributions to the wide range of + software distributed through that system in reliance on consistent + application of that system; it is up to the author/donor to decide if he or + she is willing to distribute software through any other system and a + licensee cannot impose that choice. + + This section is intended to make thoroughly clear what is believed to be a + consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in certain + countries either by patents or by copyrighted interfaces, the original + copyright holder who places the Library under this License may add an + explicit geographical distribution limitation excluding those countries, so + that distribution is permitted only in or among countries not thus excluded. + In such case, this License incorporates the limitation as if written in the + body of this License. + + 13. The Free Software Foundation may publish revised and/or new versions of + the Lesser General Public License from time to time. Such new versions will + be similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Library + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Library does not specify a license version + number, you may choose any version ever published by the Free Software + Foundation. + + 14. If you wish to incorporate parts of the Library into other free programs + whose distribution conditions are incompatible with these, write to the + author to ask for permission. For software which is copyrighted by the Free + Software Foundation, write to the Free Software Foundation; we sometimes + make exceptions for this. Our decision will be guided by the two goals of + preserving the free status of all derivatives of our free software and of + promoting the sharing and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR + THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO + THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY + PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR + CORRECTION. + + 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR + THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER + SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Libraries + +If you develop a new library, and you want it to be of the greatest possible +use to the public, we recommend making it free software that everyone can +redistribute and change. You can do so by permitting redistribution under these +terms (or, alternatively, under the terms of the ordinary General Public +License). + +To apply these terms, attach the following notices to the library. It is safest +to attach them to the start of each source file to most effectively convey the +exclusion of warranty; and each file should have at least the "copyright" line +and a pointer to where the full notice is found. + + + +Copyright (C) + +This library is free software; you can redistribute it and/or modify it under +the terms of the GNU Lesser General Public License as published by the Free +Software Foundation; either version 2.1 of the License, or (at your option) any +later version. + +This library is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License along +with this library; if not, write to the Free Software Foundation, Inc., 51 +Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your school, +if any, to sign a "copyright disclaimer" for the library, if necessary. Here is +a sample; alter the names: + +Yoyodyne, Inc., hereby disclaims all copyright interest in + +the library `Frob' (a library for tweaking knobs) written + +by James Random Hacker. + +< signature of Ty Coon > , 1 April 1990 + +Ty Coon, President of Vice + +That's all there is to it! + +------ + +** LIBPNG; version 1.6.37 -- http://prdownloads.sourceforge.net/libpng +* Copyright (c) 1995-2019 The PNG Reference Library Authors. + * Copyright (c) 2018-2019 Cosmin Truta. + * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson. + * Copyright (c) 1996-1997 Andreas Dilger. + * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +This copy of the libpng notices is provided for your convenience. In case of +any discrepancy between this copy and the notices in the file png.h that is +included in the libpng distribution, the latter shall prevail. + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE: + +If you modify libpng you may insert additional notices immediately following +this sentence. + +This code is released under the libpng license. 
+
+libpng versions 1.2.6, August 15, 2004, through 1.4.5, December 9, 2010, are
+Copyright (c) 2004, 2006-2010 Glenn Randers-Pehrson, and are distributed
+according to the same disclaimer and license as libpng-1.2.5 with the following
+individual added to the list of Contributing Authors
+
+Cosmin Truta
+
+libpng versions 1.0.7, July 1, 2000, through 1.2.5 - October 3, 2002, are
+
+Copyright (c) 2000-2002 Glenn Randers-Pehrson, and are distributed according to
+the same disclaimer and license as libpng-1.0.6 with the following individuals
+added to the list of Contributing Authors
+
+Simon-Pierre Cadieux
+
+Eric S. Raymond
+
+Gilles Vollant
+
+and with the following additions to the disclaimer:
+
+There is no warranty against interference with your enjoyment of the library or
+against infringement. There is no warranty that our efforts or the library will
+fulfill any of your particular purposes or needs. This library is provided with
+all faults, and the entire risk of satisfactory quality, performance, accuracy,
+and effort is with the user.
+
+libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are
+
+Copyright (c) 1998, 1999 Glenn Randers-Pehrson, and are distributed according
+to the same disclaimer and license as libpng-0.96, with the following
+individuals added to the list of Contributing Authors:
+
+Tom Lane
+
+Glenn Randers-Pehrson
+
+Willem van Schaik
+
+libpng versions 0.89, June 1996, through 0.96, May 1997, are
+
+Copyright (c) 1996, 1997 Andreas Dilger
+
+Distributed according to the same disclaimer and license as libpng-0.88, with
+the following individuals added to the list of Contributing Authors:
+
+John Bowler
+
+Kevin Bracey
+
+Sam Bushell
+
+Magnus Holmgren
+
+Greg Roelofs
+
+Tom Tanner
+
+libpng versions 0.5, May 1995, through 0.88, January 1996, are
+
+Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.
+
+For the purposes of this copyright and license, "Contributing Authors" is
+defined as the following set of individuals:
+
+Andreas Dilger
+
+Dave Martindale
+
+Guy Eric Schalnat
+
+Paul Schmidt
+
+Tim Wegner
+
+The PNG Reference Library is supplied "AS IS". The Contributing Authors and
+Group 42, Inc. disclaim all warranties, expressed or implied, including,
+without limitation, the warranties of merchantability and of fitness for any
+purpose. The Contributing Authors and Group 42, Inc. assume no liability for
+direct, indirect, incidental, special, exemplary, or consequential damages,
+which may result from the use of the PNG Reference Library, even if advised of
+the possibility of such damage.
+
+Permission is hereby granted to use, copy, modify, and distribute this source
+code, or portions hereof, for any purpose, without fee, subject to the
+following restrictions:
+
+  1. The origin of this source code must not be misrepresented.
+
+  2. Altered versions must be plainly marked as such and must not be
+  misrepresented as being the original source.
+
+  3. This Copyright notice may not be removed or altered from any source or
+  altered source distribution.
+
+The Contributing Authors and Group 42, Inc. specifically permit, without fee,
+and encourage the use of this source code as a component to supporting the PNG
+file format in commercial products. If you use this source code in a product,
+acknowledgment is not required but would be appreciated.
+ +A "png_get_copyright" function is available, for convenient use in "about" +boxes and the like: + +printf("%s",png_get_copyright(NULL)); + +Also, the PNG logo (in PNG format, of course) is supplied in the files +"pngbar.png" and "pngbar.jpg (88x31) and "pngnow.png" (98x31). + +Libpng is OSI Certified Open Source Software. OSI Certified Open Source is a +certification mark of the Open Source Initiative. + +Glenn Randers-Pehrson + +glennrp at users.sourceforge.net + +December 9, 2010 + +------ + +** vcintrinsics; version 782fbf7301dc73acaa049a4324c976ad94f587f7 -- +https://github.com/intel/vc-intrinsics +Copyright (c) 2019 Intel Corporation + +MIT License + +Copyright (c) 2019 Intel Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------ + +** {fmt}; version 9.1.0 -- https://github.com/fmtlib/fmt +Copyright (c) 2012 - present, Victor Zverovich +** Brotli; version 1.0.9 -- https://github.com/google/brotli +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. +** Epoxy; version 1.5.10 -- https://github.com/anholt/libepoxy +Copyright © 2013-2014 Intel Corporation. +Copyright © 2013 The Khronos Group Inc. +** Expat; version 2.5.0 -- https://github.com/libexpat/libexpat/ +Copyright (c) 1998-2000 Thai Open Source Software Center Ltd and Clark Cooper +Copyright (c) 2001-2019 Expat maintainers +** Intel(R) Graphics Compute Runtime; version 22.38.24278 -- +https://github.com/intel/compute-runtime +Copyright (C) 2021 Intel Corporation +** Intel(R) Graphics Memory Management Library; version 22.1.8 -- +https://github.com/intel/gmmlib +Copyright (c) 2017 Intel Corporation. +Copyright (c) 2016 Gabi Melman. +Copyright 2008, Google Inc. All rights reserved. +** JSON for Modern C++; version 3.10.2 -- https://github.com/nlohmann/json/ +Copyright (c) 2013-2021 Niels Lohmann +** libdecor; version 0.1.0 -- https://gitlab.freedesktop.org/libdecor/libdecor +Copyright © 2010 Intel Corporation +Copyright © 2011 Benjamin Franzke +Copyright © 2018-2021 Jonas Ådahl +Copyright © 2019 Christian Rauch +Copyright (c) 2006, 2008 Junio C Hamano +Copyright © 2017-2018 Red Hat Inc. +Copyright © 2012 Collabora, Ltd. +Copyright © 2008 Kristian Høgsberg +** Libxml2; version 2.10.4 -- http://xmlsoft.org/ +Copyright (C) 1998-2012 Daniel Veillard. All Rights Reserved. +** Mesa 3D; version 21.1.5 -- https://www.mesa3d.org/ +Copyright (C) 1999-2007 Brian Paul All Rights Reserved. 
+** oneAPI Level Zero; version v1.8.8 -- +https://github.com/oneapi-src/level-zero +Copyright (C) 2019-2021 Intel Corporation +** OPENCollada; version 1.6.68 -- https://github.com/KhronosGroup/OpenCOLLADA +Copyright (c) 2008-2009 NetAllied Systems GmbH +** PugiXML; version 1.10 -- http://pugixml.org/ +Copyright (c) 2006-2020 Arseny Kapoulkine +** QuadriFlow; version 27a6867 -- https://github.com/hjwdzh/QuadriFlow +Copyright (c) 2018 Jingwei Huang, Yichao Zhou, Matthias Niessner, +Jonathan Shewchuk and Leonidas Guibas. All rights reserved. +** robin-map; version 0.6.2 -- https://github.com/Tessil/robin-map +Copyright (c) 2017 Thibaut Goetghebuer-Planchon +** sse2neon; version 1.6.0 -- https://github.com/DLTcollab/sse2neon +Copyright sse2neon contributors +** TinyGLTF; version 2.5.0 -- https://github.com/syoyo/tinygltf +Copyright (c) 2017 Syoyo Fujita, Aurélien Chatelain and many contributors +** Wayland protocols; version 1.31 -- +https://gitlab.freedesktop.org/wayland/wayland-protocols +Copyright © 2008-2013 Kristian Høgsberg +Copyright © 2010-2013 Intel Corporation +Copyright © 2013 Rafael Antognolli +Copyright © 2013 Jasper St. Pierre +Copyright © 2014 Jonas Ådahl +Copyright © 2014 Jason Ekstrand +Copyright © 2014-2015 Collabora, Ltd. +Copyright © 2015 Red Hat Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------ + +** Harfbuzz; version 5.1.0 -- https://github.com/harfbuzz/harfbuzz +Copyright © 2010-2022 Google, Inc. +Copyright © 2015-2020 Ebrahim Byagowi +Copyright © 2019,2020 Facebook, Inc. +Copyright © 2012,2015 Mozilla Foundation +Copyright © 2011 Codethink Limited +Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies) +Copyright © 2009 Keith Stribley +Copyright © 2011 Martin Hosken and SIL International +Copyright © 2007 Chris Wilson +Copyright © 2005,2006,2020,2021,2022,2023 Behdad Esfahbod +Copyright © 2004,2007,2008,2009,2010,2013,2021,2022,2023 Red Hat, Inc. +Copyright © 1998-2005 David Turner and Werner Lemberg +Copyright © 2016 Igalia S.L. +Copyright © 2022 Matthias Clasen +Copyright © 2018,2021 Khaled Hosny +Copyright © 2018,2019,2020 Adobe, Inc +Copyright © 2013-2015 Alexei Podtelezhnikov + +HarfBuzz is licensed under the so-called "Old MIT" license. Details follow. +For parts of HarfBuzz that are licensed under different licenses see individual +files names COPYING in subdirectories where applicable. + +Copyright © 2010-2022 Google, Inc. +Copyright © 2015-2020 Ebrahim Byagowi +Copyright © 2019,2020 Facebook, Inc. 
+Copyright © 2012,2015 Mozilla Foundation +Copyright © 2011 Codethink Limited +Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies) +Copyright © 2009 Keith Stribley +Copyright © 2011 Martin Hosken and SIL International +Copyright © 2007 Chris Wilson +Copyright © 2005,2006,2020,2021,2022,2023 Behdad Esfahbod +Copyright © 2004,2007,2008,2009,2010,2013,2021,2022,2023 Red Hat, Inc. +Copyright © 1998-2005 David Turner and Werner Lemberg +Copyright © 2016 Igalia S.L. +Copyright © 2022 Matthias Clasen +Copyright © 2018,2021 Khaled Hosny +Copyright © 2018,2019,2020 Adobe, Inc +Copyright © 2013-2015 Alexei Podtelezhnikov + +For full copyright notices consult the individual files in the package. + + +Permission is hereby granted, without written agreement and without +license or royalty fees, to use, copy, modify, and distribute this +software and its documentation for any purpose, provided that the +above copyright notice and the following two paragraphs appear in +all copies of this software. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR +DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES +ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN +IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS +ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO +PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + +------ + +** OpenVDB; version 10.0.0 -- http://www.openvdb.org/ +Copyright Contributors to the OpenVDB Project + +Mozilla Public License Version 2.0 + + 1. Definitions + + 1.1. "Contributor" means each individual or legal entity that creates, + contributes to the creation of, or owns Covered Software. + + 1.2. "Contributor Version" means the combination of the Contributions of + others (if any) used by a Contributor and that particular Contributor's + Contribution. + + 1.3. "Contribution" means Covered Software of a particular Contributor. + + 1.4. "Covered Software" means Source Code Form to which the initial + Contributor has attached the notice in Exhibit A, the Executable Form of + such Source Code Form, and Modifications of such Source Code Form, in + each case including portions thereof. + + 1.5. "Incompatible With Secondary Licenses" means + + (a) that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + + 1.6. "Executable Form" means any form of the work other than Source Code + Form. + + 1.7. "Larger Work" means a work that combines Covered Software with other + material, in a separate file or files, that is not Covered Software. + + 1.8. "License" means this document. + + 1.9. "Licensable" means having the right to grant, to the maximum extent + possible, whether at the time of the initial grant or subsequently, any + and all of the rights conveyed by this License. + + 1.10. "Modifications" means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + + 1.11. 
"Patent Claims" of a Contributor means any patent claim(s), + including without limitation, method, process, and apparatus claims, in + any patent Licensable by such Contributor that would be infringed, but + for the grant of the License, by the making, using, selling, offering for + sale, having made, import, or transfer of either its Contributions or its + Contributor Version. + + 1.12. "Secondary License" means either the GNU General Public License, + Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU + Affero General Public License, Version 3.0, or any later versions of + those licenses. + + 1.13. "Source Code Form" means the form of the work preferred for making + modifications. + + 1.14. "You" (or "Your") means an individual or a legal entity exercising + rights under this License. For legal entities, "You" includes any entity + that controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct or + indirect, to cause the direction or management of such entity, whether by + contract or otherwise, or (b) ownership of more than fifty percent (50%) + of the outstanding shares or beneficial ownership of such entity. + + 2. License Grants and Conditions + + 2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + (a) under intellectual property rights (other than patent or + trademark) Licensable by such Contributor to use, reproduce, make + available, modify, display, perform, distribute, and otherwise exploit + its Contributions, either on an unmodified basis, with Modifications, + or as part of a Larger Work; and + + (b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + + 2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + + 2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + (a) for any code that a Contributor has removed from Covered Software; + or + + (b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + (c) under Patent Claims infringed by Covered Software in the absence + of its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + + 2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + + 2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + + 2.6. 
Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + + 2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + 3. Responsibilities + + 3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + + 3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + (a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more than + the cost of distribution to the recipient; and + + (b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + + 3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + + 3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + + 3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + + 4. 
Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this + License to the maximum extent possible; and (b) describe the limitations and + the code they affect. Such description must be placed in a text file + included with all distributions of the Covered Software under this License. + Except to the extent prohibited by statute or regulation, such description + must be sufficiently detailed for a recipient of ordinary skill to be able + to understand it. + + 5. Termination + + 5.1. The rights granted under this License will terminate automatically + if You fail to comply with any of its terms. However, if You become + compliant, then the rights granted under this License from a particular + Contributor are reinstated (a) provisionally, unless and until such + Contributor explicitly and finally terminates Your grants, and (b) on an + ongoing basis, if such Contributor fails to notify You of the + non-compliance by some reasonable means prior to 60 days after You have + come back into compliance. Moreover, Your grants from a particular + Contributor are reinstated on an ongoing basis if such Contributor + notifies You of the non-compliance by some reasonable means, this is the + first time You have received notice of non-compliance with this License + from such Contributor, and You become compliant prior to 30 days after + Your receipt of the notice. + + 5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + + 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end + user license agreements (excluding distributors and resellers) which have + been validly granted by You or Your distributors under this License prior + to termination shall survive termination. + + 6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of + this License. No use of any Covered Software is authorized under this + License except under this disclaimer. + + 7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + + 8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party's ability to bring cross-claims or counter-claims. + + 9. Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to + construe this License against a Contributor. + + 10. Versions of the License + + 10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + + 10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + + 10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + + 10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses + + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. Exhibit A + - Source Code Form License Notice + +This Source Code Form is subject to the terms of the Mozilla Public License, v. +2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + +This Source Code Form is "Incompatible With Secondary Licenses", as defined by +the Mozilla Public License, v. 2.0. + +------ + +** minizip-ng; version 3.0.7 -- https://github.com/zlib-ng/minizip-ng +Copyright (C) Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2010 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + +Condition of use and distribution are the same as zlib: + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgement in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +------ + +** Bullet Continuous Collision Detection and Physics Library; version 3.07 -- +http://continuousphysics.com/Bullet/ +Bullet Continuous Collision Detection and Physics Library +Copyright (c) 2003-2006 Erwin Coumans http://continuousphysics.com/Bullet/ +** Haru; version 2.3.0 -- http://libharu.org/ +Copyright 2000-2006 (c) Takeshi Kanno +Copyright 2007-2009 (c) Antony Dovgal et al. +** NanoSVG; version 3cdd4a9d788 -- https://github.com/memononen/nanosvg +Copyright (c) 2013-14 Mikko Mononen memon@inside.org +** SDL; version 2.0.20 -- https://www.libsdl.org +Copyright (C) 1997-2020 Sam Lantinga +** TinyXML; version 2.6.2 -- https://sourceforge.net/projects/tinyxml/ +Lee Thomason, Yves Berquin, Andrew Ellerton. +** zlib; version 1.2.13 -- https://zlib.net +Copyright (C) 1995-2017 Jean-loup Gailly + +zlib License Copyright (c) + +This software is provided 'as-is', without any express or implied warranty. In +no event will the authors be held liable for any damages arising from the use +of this software. + +Permission is granted to anyone to use this software for any purpose, including +commercial applications, and to alter it and redistribute it freely, subject to +the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software in a + product, an acknowledgment in the product documentation would be appreciated + but is not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. + +------ + +** LibTIFF; version 4.5.1 -- http://www.libtiff.org/ +Copyright (c) 1988-1997 Sam Leffler +Copyright (c) 1991-1997 Silicon Graphics, Inc. + +Copyright (c) 1988-1997 Sam Leffler + +Copyright (c) 1991-1997 Silicon Graphics, Inc. 
+ +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that (i) +the above copyright notices and this permission notice appear in all copies of +the software and related documentation, and (ii) the names of Sam Leffler and +Silicon Graphics may not be used in any advertising or publicity relating to +the software without the specific, prior written permission of Sam Leffler and +Silicon Graphics. + +THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, EXPRESS, +IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY WARRANTY OF +MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + +IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR ANY SPECIAL, +INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED +OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +------ + +** The LLVM Compiler Infrastructure; version 12.0.0 -- +https://github.com/llvm/llvm-project/ +University of Illinois/NCSA +Open Source License + +Copyright (c) 2003-2019 University of Illinois at Urbana-Champaign. +All rights reserved. + +---- LLVM Exceptions to the Apache 2.0 License ---- + +As an exception, if, as a result of your compiling your source code, portions +of this Software are embedded into an Object form of such source code, you +may redistribute such embedded portions in such Object form without complying +with the conditions of Sections 4(a), 4(b) and 4(d) of the License. + +In addition, if you combine or link compiled forms of this Software with +software that is licensed under the GPLv2 ("Combined Software") and if a +court of competent jurisdiction determines that the patent provision (Section +3), the indemnity provision (Section 9) or other Section of the License +conflicts with the conditions of the GPLv2, you may retroactively and +prospectively choose to deem waived or otherwise exclude such Section(s) of +the License, but only in their entirety and only with respect to the Combined +Software. + +------ + +** OpenSubdiv; version 3.5.0 -- http://graphics.pixar.com/opensubdiv +Copyright 2013 Pixar +** Universal Scene Description; version 23.05 -- http://www.openusd.org/ +Copyright 2016 Pixar + +Licensed under the Apache License, Version 2.0 (the "Apache License") with the +following modification; you may not use this file except in compliance with the +Apache License and the following modification to it: + +Section 6. Trademarks. is deleted and replaced with: + +6. Trademarks. This License does not grant permission to use the trade names, +trademarks, service marks, or product names of the Licensor and its affiliates, +except as required to comply with Section 4(c) of the License and to reproduce +the content of the NOTICE file. + +------ + +** libjpeg-turbo; version 2.1.3 -- +https://github.com/libjpeg-turbo/libjpeg-turbo/ +Copyright (C)2009-2020 D. R. Commander. All Rights Reserved. +Copyright (C)2015 Viktor Szathmáry. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. 
+Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. +Neither the name of the libjpeg-turbo Project nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS", +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** Boost C++ Libraries; version 1.80.0 -- https://www.boost.org/ +The Boost license encourages both commercial and non-commercial use and does +not require attribution for binary use. + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +------ + +** Alembic; version 1.8.3 -- https://github.com/alembic/alembic +TM & © 2009-2015 Lucasfilm Entertainment Company Ltd. or Lucasfilm Ltd. +All rights reserved. + +Industrial Light & Magic, ILM and the Bulb and Gear design logo are all +registered trademarks or service marks of Lucasfilm Ltd. + +© 2009-2015 Sony Pictures Imageworks Inc. All rights reserved. + +TM & © 2009-2015 Lucasfilm Entertainment Company Ltd. or Lucasfilm Ltd. +All rights reserved. + +Industrial Light & Magic, ILM and the Bulb and Gear design logo are all +registered trademarks or service marks of Lucasfilm Ltd. + +© 2009-2015 Sony Pictures Imageworks Inc. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. +* Neither the name of Industrial Light & Magic nor the names of +its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +------------------------------------------------------------------------------- + +ALEMBIC ATTACHMENT A — +REQUIRED NOTICES FOR DISTRIBUTION + +The Alembic Software is distributed along with certain third party +components licensed under various open source software licenses ("Open +Source Components"). In addition to the warranty disclaimers contained +in the open source licenses found below, Industrial Light & Magic, a +division of Lucasfilm Entertainment Company Ltd. ("ILM") makes the +following disclaimers regarding the Open Source Components on behalf of +itself, the copyright holders, contributors, and licensors of such Open +Source Components: + +TO THE FULLEST EXTENT PERMITTED UNDER APPLICABLE LAW, THE OPEN SOURCE +COMPONENTS ARE PROVIDED BY THE COPYRIGHT HOLDERS, CONTRIBUTORS, +LICENSORS, AND ILM "AS IS" AND ANY REPRESENTATIONS OR WARRANTIES OF ANY +KIND, WHETHER ORAL OR WRITTEN, WHETHER EXPRESS, IMPLIED, OR ARISING BY +STATUTE, CUSTOM, COURSE OF DEALING, OR TRADE USAGE, INCLUDING WITHOUT +LIMITATION THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR +A PARTICULAR PURPOSE, AND NON-INFRINGEMENT, ARE DISCLAIMED. IN NO EVENT +WILL THE COPYRIGHT OWNER, CONTRIBUTORS, LICENSORS, OR ILM AND/OR ITS +AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION), HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THE OPEN +SOURCE COMPONENTS, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +Boost C++ Libraries +------------------------------------------------------------------------ + +Boost Software License – Version 1.0 August 17th, 2003 Permission is +hereby granted, free of charge, to any person or organization obtaining +a copy of the software and accompanying documentation covered by this +license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of +the Software, and to permit third-parties to whom the Software is +furnished to do so, all subject to the following: + +The copyright notices in the Software and this entire statement, +including the above license grant, this restriction and the following +disclaimer, must be included in all copies of the Software, in whole or +in part, and all derivative works of the Software, unless such copies or +derivative works are solely in the form of machine-executable object +code generated by a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND +NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE +DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, +WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------ + +** WC Width; version 2007-05-26 -- http://www.cl.cam.ac.uk/~mgk25/ucs/wcwidth.c +Markus Kuhn -- 2007-05-26 (Unicode 5.0) + +Permission to use, copy, modify, and distribute this software +for any purpose and without fee is hereby granted. The author +disclaims all warranties with regard to this software. + +------ + +** Wayland; version 1.22.0 -- https://gitlab.freedesktop.org/wayland/wayland +Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved. +Copyright © 2011 Kristian Høgsberg +Copyright © 2011 Benjamin Franzke +Copyright © 2010-2012 Intel Corporation +Copyright © 2012 Collabora, Ltd. +Copyright © 2015 Giulio Camuffo +Copyright © 2016 Klarälvdalens Datakonsult AB, a KDAB Group company, +info@kdab.com +Copyright © 2012 Jason Ekstrand +Copyright (c) 2014 Red Hat, Inc. +Copyright © 2013 Marek Chalupa +Copyright © 2014 Jonas Ådahl +Copyright © 2016 Yong Bakos +Copyright © 2017 Samsung Electronics Co., Ltd +Copyright © 2002 Keith Packard +Copyright 1999 SuSE, Inc. +Copyright © 2012 Philipp Brüschweiler +Copyright (c) 2020 Simon Ser +Copyright (c) 2006, 2008 Junio C Hamano + +MIT Expat + +------ + +** Python; version 3.10.12 -- https://www.python.org +Copyright (c) 2001-2021 Python Software Foundation. All rights reserved. + +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. 
In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, +2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software +Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + +------ + +** Flex; version 2.6.4 -- https://github.com/westes/flex +Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007 The Flex Project. + +Copyright (c) 1990, 1997 The Regents of the University of California. +All rights reserved. + +This code is derived from software contributed to Berkeley by +Vern Paxson. + +The United States Government has rights in this work pursuant +to contract no. DE-AC03-76SF00098 between the United States +Department of Energy and the University of California. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ +Neither the name of the University nor the names of its contributors +may be used to endorse or promote products derived from this software +without specific prior written permission. + +THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. + +This basically says "do whatever you please with this software except +remove this notice or take advantage of the University's (or the flex +authors') name". + +Note that the "flex.skl" scanner skeleton carries no copyright notice. +You are free to do whatever you please with scanners generated using flex; +for them, you are not even bound by the above copyright. + +------ + +** Jemalloc; version 5.2.1 -- https://github.com/jemalloc/jemalloc +Copyright (C) 2002-present Jason Evans . +All rights reserved. +Copyright (C) 2007-2012 Mozilla Foundation. All rights reserved. +Copyright (C) 2009-present Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice(s), + this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice(s), + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** The OpenGL Extension Wrangler Library; version 2.0.0 -- +http://glew.sourceforge.net/ +Copyright (C) 2008-2015, Nigel Stewart +Copyright (C) 2002-2008, Milan Ikits +Copyright (C) 2002-2008, Marcelo E. Magallon +Copyright (C) 2002, Lev Povalahev +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* The name of the author may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. + + +Mesa 3-D graphics library +Version: 7.0 + +Copyright (C) 1999-2007 Brian Paul All Rights Reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +Copyright (c) 2007 The Khronos Group Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and/or associated documentation files (the +"Materials"), to deal in the Materials without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Materials, and to +permit persons to whom the Materials are furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Materials. + +THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. \ No newline at end of file diff --git a/blender/license/bfont.ttf.txt b/blender/license/bfont.ttf.txt new file mode 100644 index 0000000..4aa2236 --- /dev/null +++ b/blender/license/bfont.ttf.txt @@ -0,0 +1,100 @@ +Fonts are (c) Bitstream (see below). DejaVu changes are in public domain. Glyphs imported from Arev fonts are (c) Tavmjung Bah (see below) + +'DeJaVu-Lite' changes (removing characters for lighter file size) are in public domain. Source file is accompanied in this directory, BZip2 compressed, DeJaVuSans-Lite.sfd.bz2 . + + +Bitstream Vera Fonts Copyright +------------------------------ + +Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is +a trademark of Bitstream, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of the fonts accompanying this license ("Fonts") and associated +documentation files (the "Font Software"), to reproduce and distribute the +Font Software, including without limitation the rights to use, copy, merge, +publish, distribute, and/or sell copies of the Font Software, and to permit +persons to whom the Font Software is furnished to do so, subject to the +following conditions: + +The above copyright and trademark notices and this permission notice shall +be included in all copies of one or more of the Font Software typefaces. + +The Font Software may be modified, altered, or added to, and in particular +the designs of glyphs or characters in the Fonts may be modified and +additional glyphs or characters may be added to the Fonts, only if the fonts +are renamed to names not containing either the words "Bitstream" or the word +"Vera". + +This License becomes null and void to the extent applicable to Fonts or Font +Software that has been modified and is distributed under the "Bitstream +Vera" names. + +The Font Software may be sold as part of a larger software package but no +copy of one or more of the Font Software typefaces may be sold by itself. + +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, +TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME +FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING +ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF +THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE +FONT SOFTWARE. + +Except as contained in this notice, the names of Gnome, the Gnome +Foundation, and Bitstream Inc., shall not be used in advertising or +otherwise to promote the sale, use or other dealings in this Font Software +without prior written authorization from the Gnome Foundation or Bitstream +Inc., respectively. For further information, contact: fonts at gnome dot +org. + +Arev Fonts Copyright +------------------------------ + +Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the fonts accompanying this license ("Fonts") and +associated documentation files (the "Font Software"), to reproduce +and distribute the modifications to the Bitstream Vera Font Software, +including without limitation the rights to use, copy, merge, publish, +distribute, and/or sell copies of the Font Software, and to permit +persons to whom the Font Software is furnished to do so, subject to +the following conditions: + +The above copyright and trademark notices and this permission notice +shall be included in all copies of one or more of the Font Software +typefaces. + +The Font Software may be modified, altered, or added to, and in +particular the designs of glyphs or characters in the Fonts may be +modified and additional glyphs or characters may be added to the +Fonts, only if the fonts are renamed to names not containing either +the words "Tavmjong Bah" or the word "Arev". + +This License becomes null and void to the extent applicable to Fonts +or Font Software that has been modified and is distributed under the +"Tavmjong Bah Arev" names. 
+ +The Font Software may be sold as part of a larger software package but +no copy of one or more of the Font Software typefaces may be sold by +itself. + +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL +TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. + +Except as contained in this notice, the name of Tavmjong Bah shall not +be used in advertising or otherwise to promote the sale, use or other +dealings in this Font Software without prior written authorization +from Tavmjong Bah. For further information, contact: tavmjong @ free +. fr. + diff --git a/blender/license/bmonofont-i18n.ttf.txt b/blender/license/bmonofont-i18n.ttf.txt new file mode 100644 index 0000000..20ed51c --- /dev/null +++ b/blender/license/bmonofont-i18n.ttf.txt @@ -0,0 +1,93 @@ +Blender Mono I18n font includes glyphs imported from the following fonts: + +1. DejaVu Sans Mono +2. M+ 1M Regular +3. Wen Quan Yi Micro Hei Mono +4. Droid Sans Hebrew Regular (with some edits) + +These were merged using FontForge in the above order. For each glyph, +a license of the font from which it was imported is applied. + +Feb 2020 - Cyrillic unicode range (specifically U+400-U+0525) reimported +from current version of DejaVu Sans Mono. + +---------------------------------------------------------------------- +Summary of Copyrights and Licenses + +(1) DejaVu Sans Mono + +Copyright: 2003 Bitstream, Inc. (Bitstream font glyphs) + 2006 Tavmjong Bah (Arev font glyphs) + DejaVu changes are in public domain + +License: + DejaVu font glyphs are same as bmonofont.ttf. See LICENSE-bfont.ttf.txt. + +(2) M+ 1M Regular + +Copyright: 2002-2012 M+ FONTS PROJECT + +License: + These fonts are free software. + Unlimited permission is granted to use, copy, and distribute it, with or + without modification, either commercially and noncommercially. + THESE FONTS ARE PROVIDED "AS IS" WITHOUT WARRANTY. + +(3) Wen Quan Yi Micro Hei Mono + +Copyright: 2007 Google Corporation (Digitized data) + 2008-2009 WenQuanYi Project Board of Trustees + 2008-2009 mozbug and Qianqian Fang (Droid Sans Fallback extension interface) + +License: Apache-2.0 or GPL-3 with font embedding exception + See Appendices A and B. + +(4) Droid Sans Hebrew Regular + +Copyright: 2011 Google Corporation + +License: Apache-2.0 + See Appendix A. + +---------------------------------------------------------------------- +Appendix A. Apache License Version 2.0 + + Licensed under the Apache License, Version 2.0 (the "License"); you + may not use this file except in compliance with the License. You may + obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied. See the License for the specific language governing + permissions and limitations under the License. + +---------------------------------------------------------------------- +Appendix B. 
GNU GPL Version 3 with font embedding exception + +GPL-3: + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Font embedding exception: + As a special exception, if you create a document which uses this + font, and embed this font or unaltered portions of this font into + the document, this font does not by itself cause the resulting + document to be covered by the GNU General Public License. This + exception does not however invalidate any other reasons why the + document might be covered by the GNU General Public License. If you + modify this font, you may extend this exception to your version of + the font, but you are not obligated to do so. If you do not wish to + do so, delete this exception statement from your version. diff --git a/blender/license/droidsans.ttf.txt b/blender/license/droidsans.ttf.txt new file mode 100644 index 0000000..66eca9c --- /dev/null +++ b/blender/license/droidsans.ttf.txt @@ -0,0 +1,121 @@ +Blender Main I18n font ("droidsans.ttf") includes glyphs imported from the following fonts: + +1. DejaVu Sans +2. Droid Sans Regular +3. Samyak-devanagari +4. Droid Sans Hebrew Regular +5. Droid Sans Ethiopic Regular +6. Samyak-tamil +7. KhmerOSsys +8. tlwg' Loma (Thaï) + +These were merged using FontForge in (approximately) the above order. For each glyph, +a license of the font from which it was imported is applied. + +Feb 2020 - Cyrillic unicode range (specifically U+400-U+0525) reimported +from current version of DejaVu Sans + +---------------------------------------------------------------------- +Summary of Copyrights and Licenses + +(1) DejaVu Sans + +Copyright: 2003 Bitstream, Inc. (Bitstream font glyphs) + 2006 Tavmjong Bah (Arev font glyphs) + DejaVu changes are in public domain + +License: + DejaVu font glyphs are same as bfont.ttf. See LICENSE-bfont.ttf.txt. + +(2), (4), (5) Droid Sans Fonts family + +Copyright: + Copyright © 2006, 2007, 2008, 2009, 2010 Google Corp. + Droid is a trademark of Google Corp. + +License: Apache-2.0 + See Appendix A. + +(3) Samyak-devanagari and (6) Samyak-tamil +Copyright: 2005-2006, Rahul Bhalerao + 2005-2006, Bageshri Salvi + 2005-2006, Pravin Satpute + 2005-2006, Sandeep Shedmake + +License: GPL-3 with font embedding exception + See Appendices B. + +(7) KhmerOSsys +Copyright: 2005, 2006 Danh Hong + 2005, 2006 Open Forum of Cambodia + +License: GPL-2.1+ + See Appendices C. + +(8) tlwg Loma (Thaï) +Copyright: 2003 National Electronics and Computer Technology Center + +Modified to fix and/or adapt kerning to Blender basic layout engine by Hồ Châu in 2016. + +License: GPL-2+ with Font exception + See Appendices C. + +---------------------------------------------------------------------- +Appendix A. Apache License Version 2.0 + + Licensed under the Apache License, Version 2.0 (the "License"); you + may not use this file except in compliance with the License. 
You may + obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied. See the License for the specific language governing + permissions and limitations under the License. + +---------------------------------------------------------------------- +Appendix B. GNU GPL Version 3 with font embedding exception + +GPL-3: + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Font embedding exception: + As a special exception, if you create a document which uses this + font, and embed this font or unaltered portions of this font into + the document, this font does not by itself cause the resulting + document to be covered by the GNU General Public License. This + exception does not however invalidate any other reasons why the + document might be covered by the GNU General Public License. If you + modify this font, you may extend this exception to your version of + the font, but you are not obligated to do so. If you do not wish to + do so, delete this exception statement from your version. + +---------------------------------------------------------------------- +Appendix C. GNU GPL Version 2.1 + +This font is free software; you can redistribute it and/or modify it +under the terms of the GNU Lesser General Public License as published +by the Free Software Foundation; either version 2.1 of the License, or +(at your option) any later version. + +This library is distributed in the hope that it will be useful, but +WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY +or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +License for more details. + +On Debian systems, the complete text of the GNU Lesser General Public +License can be found in the file /usr/share/common-licenses/LGPL-2. + diff --git a/blender/license/jemalloc.txt b/blender/license/jemalloc.txt new file mode 100644 index 0000000..cc6be93 --- /dev/null +++ b/blender/license/jemalloc.txt @@ -0,0 +1,29 @@ +jemalloc is released under the terms of the following BSD-derived license: + +Copyright (C) 2002-2013 Jason Evans . +All rights reserved. +Copyright (C) 2007-2012 Mozilla Foundation. All rights reserved. +Copyright (C) 2009-2013 Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice(s), + this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice(s), + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Copyright (C) 2013 Jason Evans . + +Last updated 2013/03/07. diff --git a/blender/readme.html b/blender/readme.html new file mode 100644 index 0000000..41f527e --- /dev/null +++ b/blender/readme.html @@ -0,0 +1,111 @@ + + + + + + Blender Readme + + + +

Blender 3.6

About

Welcome to Blender, the free, open source 3D application for modeling, rigging, animation, simulation, rendering, compositing, motion tracking, and video editing.

Blender is available for Linux, macOS and Windows and has a large world-wide community.

Blender can be used freely for any purpose, including commercial use and distribution. It's free and open-source software, released under the GNU GPL licence. The entire source code is available on our website.

For more information, visit blender.org.

3.6

The Blender Foundation and online developer community is proud to present Blender 3.6. More information about this release.

Bugs

Although this is considered a stable release, you may encounter a bug. If you do, please help us by posting it in the bug tracker or using Help → Report a Bug from inside Blender. If it wasn't reported yet, please log in (or register) and fill in detailed information about the error. Please post detailed instructions on how to reproduce it or post a .blend file showcasing the bug.

Package Contents

The downloaded Blender package includes:
• The Blender application for the chosen operating system.
• Add-ons to extend Blender functionality.
• Readme and copyright files.

Installation

Windows: The downloaded .zip contains a Blender folder. You may put this anywhere on your hard drive. To launch Blender, double-click on Blender.exe.

Linux: Unpack the archive, then run the Blender executable.

macOS: The downloaded package includes Blender.app. Optionally copy this to your Applications folder, and add it to the dock by dragging it from there to the dock.

Installing Addons (all systems): Addons can be installed from the user preferences addons section; download an addon as a .py or .zip file, then press the "Install Addon" button and select the file to install it.

Links

Users:
General information: www.blender.org
Release Notes: wiki.blender.org/wiki/Reference/Release_Notes/3.6
Tutorials: www.blender.org/support/tutorials/
Manual: https://docs.blender.org/manual/en/latest/
User Forum: www.blenderartists.org
Chat: #today or #support on blender.chat

Developers:
Development: www.blender.org/get-involved/developers/
GIT and Bug Tracker: projects.blender.org
Chat: #blender-coders on blender.chat

Blender is open-source and free for all to use.

Enjoy.
+ + diff --git a/blender/resize_model.py b/blender/resize_model.py new file mode 100644 index 0000000..5084083 --- /dev/null +++ b/blender/resize_model.py @@ -0,0 +1,193 @@ +from math import radians +import sys, platform, os, time, bpy, requests, json, bmesh, shutil +from PIL import Image +import platform +# if platform.system() == 'Windows': +# sys.path.append('e:\\libs\\') +# else: +# sys.path.append('/data/deploy/make3d/make2/libs/') +sys.path.append('/home/acprint/code/libs/') +import config, libs + +def bmesh_copy_from_object(obj, transform=True, triangulate=True, apply_modifiers=False): + """Returns a transformed, triangulated copy of the mesh""" + assert obj.type == 'MESH' + if apply_modifiers and obj.modifiers: + import bpy + depsgraph = bpy.context.evaluated_depsgraph_get() + obj_eval = obj.evaluated_get(depsgraph) + me = obj_eval.to_mesh() + bm = bmesh.new() + bm.from_mesh(me) + obj_eval.to_mesh_clear() + else: + me = obj.data + if obj.mode == 'EDIT': + bm_orig = bmesh.from_edit_mesh(me) + bm = bm_orig.copy() + else: + bm = bmesh.new() + bm.from_mesh(me) + if transform: + matrix = obj.matrix_world.copy() + if not matrix.is_identity: + bm.transform(matrix) + matrix.translation.zero() + if not matrix.is_identity: + bm.normal_update() + if triangulate: + bmesh.ops.triangulate(bm, faces=bm.faces) + return bm + +def fix_link_texture(pid): + # 修改obj中的mtl文件为pid_original.mtl + path = os.path.join(workdir, 'print', f'{pid}_{orderId}') + filename = os.path.join(path, f'{pid}_original.obj') + + with open(filename, 'r') as f: + lines = f.readlines() + for i in range(len(lines)): + if lines[i].startswith('mtllib'): + lines[i] = f'mtllib {pid}_original.mtl\n' + break + with open(filename, 'w') as f: + f.writelines(lines) + + f.close() + + # 将pid.mtl文件复制为pid_original.mtl _decimate + shutil.copy(os.path.join(path, f'{pid}.mtl'), os.path.join(path, f'{pid}_original.mtl')) + shutil.copy(os.path.join(path, f'{pid}Tex1.jpg'), os.path.join(path, f'{pid}Tex1_decimate.jpg')) + texture_file = os.path.join(path, f'{pid}Tex1_decimate.jpg') + if os.path.exists(texture_file): + img = Image.open(texture_file) + img = img.resize((int(img.size[0] * 0.5), int(img.size[1] * 0.5))) + img.save(texture_file, quality=90, optimize=True) + print('resize texture file to 50% success') + # 修改pid_original.mtl文件中的贴图为pid_old.jpg + with open(os.path.join(path, f'{pid}_original.mtl'), 'r') as f: + lines = f.readlines() + for i in range(len(lines)): + if lines[i].startswith('map_Kd'): + lines[i] = f'map_Kd {pid}Tex1_decimate.jpg\n' + break + with open(os.path.join(path, f'{pid}_original.mtl'), 'w') as f: + f.writelines(lines) + + f.close() + + +def main(): + start = time.time() + + get_printsize_url = 'https://mp.api.suwa3d.com/api/printOrder/info' + upload_obj_volume_url = 'https://mp.api.suwa3d.com/api/physical/add' # ?pid=1&order_id=1&faces=1&volume=1 + + + res = requests.get(f'{get_printsize_url}?id={orderId}') + print('获取打印尺寸:', res.text) + + if res.status_code == 200: + pid = res.json()['data']['pid'] + path = os.path.join(workdir, 'print', f'{pid}_{orderId}') + filename = os.path.join(path, f'{pid}.obj') + bpy.ops.object.delete(use_global=False, confirm=False) + bpy.context.scene.unit_settings.scale_length = 0.001 + bpy.context.scene.unit_settings.length_unit = 'CENTIMETERS' + bpy.context.scene.unit_settings.mass_unit = 'GRAMS' + + print('正在处理:', filename) + bpy.ops.import_scene.obj(filepath=filename) + obj = bpy.context.selected_objects[0] + print('原始模型尺寸:', obj.dimensions) + 
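# Illustrative sketch (hypothetical helper names, not part of the original file): the
# per-size loop further down derives the target height from each print file name and
# scales the mesh uniformly. The "<pid>_<height>cm_<n>.obj" naming and the 1.2 g/cm^3
# density factor are assumptions inferred from this script, not confirmed elsewhere.
def parse_target_height_mm(obj_filename):
    """'123_15cm_1.obj' -> 150.0: take the second-to-last '_' token, strip 'cm', cm -> mm."""
    return float(obj_filename.split('_')[-2][:-2]) * 10

def uniform_scale(current_height_mm, target_height_mm):
    """Factor applied to X/Y/Z so the model's Z height matches the ordered size."""
    return target_height_mm / current_height_mm

def estimated_weight_g(volume_cm3, density_g_cm3=1.2):
    """Rough print weight reported to the order API: volume in cm^3 times density."""
    return volume_cm3 * density_g_cm3

# e.g. parse_target_height_mm("123456_15cm_1.obj") == 150.0
# e.g. uniform_scale(92.4, 150.0) is roughly 1.623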
bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + print('应用后模型尺寸:', obj.dimensions) + shutil.copy(filename, os.path.join(path, f'{pid}_original.obj')) + filename_original = os.path.join(path, f'{pid}_original.obj') + + for f in res.json()['data']['fileList']: + + if "undefined" in f: + continue + + try: + height = float(f.split('_')[-2][:-2]) * 10 + except Exception as e: + print("eeee",e) + return + + + obj = bpy.context.selected_objects[0] + print(f'{f}处理前{height}mm模型尺寸: {obj.dimensions}') + scale = height / obj.dimensions.z + obj.scale = (scale, scale, scale) + bpy.ops.object.transform_apply(scale=True) + print(f'{f}处理后{height}mm模型尺寸: {obj.dimensions}') + + bpy.ops.export_scene.obj(filepath=os.path.join(path, f'{pid}.obj')) + if os.path.exists(os.path.join(path, f)): + os.remove(os.path.join(path, f)) + os.rename(os.path.join(path, f'{pid}.obj'), os.path.join(path, f)) + config.oss_bucket.put_object_from_file(f'objs/print/{pid}/{f}', os.path.join(path, f)) + + # 重新加载模型,然后生成数字模型 + bpy.ops.object.delete(use_global=False, confirm=False) + fix_link_texture(pid) + bpy.ops.import_scene.obj(filepath=filename_original) + bpy.context.scene.unit_settings.scale_length = 0.001 + bpy.context.scene.unit_settings.length_unit = 'CENTIMETERS' + bpy.context.scene.unit_settings.mass_unit = 'GRAMS' + + obj = bpy.context.selected_objects[0] + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + + scale = 90 / obj.dimensions.y + obj.scale = (scale, scale, scale) + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + # if str(pid) == '120745': bpy.ops.wm.save_as_mainfile(filepath=os.path.join(path, f'{pid}_{orderId}.blend')) + + headcount = res.json()['data']['headcount'] + + bm = bmesh_copy_from_object(obj) + obj_volume = round(bm.calc_volume() / 1000, 3) + print('volume:', obj_volume) + print('weight:', obj_volume * 1.2, 'g') + tempWeight = obj_volume * 1.2 + faces = len(obj.data.polygons) + print('faces:', faces) + upload_res = requests.get(f'{upload_obj_volume_url}?pid={pid}&order_id={orderId}&faces={faces}&volume={obj_volume}&headcount={headcount}&weight={tempWeight}') + print('上传模型体积:', upload_res.text) + + # 生成数字模型 + + faces_dest = 120000 * headcount + # 减面 + faces_current = len(obj.data.polygons) + bpy.ops.object.modifier_add(type='DECIMATE') + bpy.context.object.modifiers["Decimate"].ratio = faces_dest / faces_current + bpy.ops.object.modifier_apply(modifier="Decimate") + + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_VOLUME', center='MEDIAN') + bpy.context.object.location[0] = 0 + bpy.context.object.location[1] = 0 + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + + bpy.ops.export_scene.obj(filepath=os.path.join(path, f'{pid}_decimate.obj')) + bpy.ops.export_scene.gltf(filepath=os.path.join(path, f'{pid}_decimate.glb'), export_format='GLB', export_apply=True, export_jpeg_quality=80) + config.oss_bucket.put_object_from_file(f'glbs/3d/{pid}.glb', os.path.join(path, f'{pid}_decimate.glb')) + bpy.ops.wm.quit_blender() + + +if __name__ == '__main__': + if platform.system() == 'Windows': + workdir = 'E:\\' + else: + workdir = '/data/datasets/' + + if len(sys.argv) - (sys.argv.index("--") + 1) < 1: + print("Usage: blender -b -P resize_model.py -- ") + sys.exit(1) + orderId = sys.argv[sys.argv.index("--") + 1] + + main() diff --git a/blender/subprocess_fill_dm_code.py b/blender/subprocess_fill_dm_code.py new file mode 100644 index 0000000..57d0970 --- /dev/null +++ b/blender/subprocess_fill_dm_code.py @@ -0,0 +1,22 
@@ +import subprocess +import sys +blender_cmd = "blender" +blender_script = "fill_dm_code.py" +blender_process = subprocess.Popen([blender_cmd,"-b","-P",blender_script],stdout=subprocess.PIPE,stderr=subprocess.PIPE,text=True) + +#实时读取输出 +while True: + output_line = blender_process.stdout.readline() + if output_line == "" and process.poll() is not None: + break + if output_line: + print(output_line.strip()) + + +error = blender_process.communicate()[1] + +if error: + print("Error",error.strip()) + +blender_process.wait() + diff --git a/config/nose.yaml b/config/nose.yaml new file mode 100644 index 0000000..5555457 --- /dev/null +++ b/config/nose.yaml @@ -0,0 +1,24 @@ +Nose_Config: + Select_system: + Windows_path: + workdir: 'D:\\print\\' + pids_txt: '\Texture_photos.txt' + + no_detect_nose_error: 'no_detect_nose_error.log' + small_face_error: 'small_face_error.log' + mediapipe_judge_fail: 'mediapipe_judge_fail.log' + run_logging: 'run_logging.log' + + Linux_path: + workdir: '/data/datasets/fix' + workdirImgPath: '/data/datasets/print' + pids_txt: '\Texture_photos.txt' + + no_detect_nose_error: 'no_detect_nose_error.log' + small_face_error: 'small_face_error.log' + mediapipe_judge_fail: 'mediapipe_judge_fail.log' + run_logging: 'run_logging.log' + + Personal_parameter: + image_angle_list: [0, 90, 180, 270] + draw_nose_rectangle: False diff --git a/doingLog/81808.txt b/doingLog/81808.txt new file mode 100644 index 0000000..56a6051 --- /dev/null +++ b/doingLog/81808.txt @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/doingLog/82343.txt b/doingLog/82343.txt new file mode 100644 index 0000000..56a6051 --- /dev/null +++ b/doingLog/82343.txt @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/doingLog/82551.txt b/doingLog/82551.txt new file mode 100644 index 0000000..56a6051 --- /dev/null +++ b/doingLog/82551.txt @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/doingLog/82659.txt b/doingLog/82659.txt new file mode 100644 index 0000000..56a6051 --- /dev/null +++ b/doingLog/82659.txt @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/fix_nose.py b/fix_nose.py new file mode 100644 index 0000000..4f9cfdd --- /dev/null +++ b/fix_nose.py @@ -0,0 +1,443 @@ +''' + 找到贴图图片中的人脸,并对人脸的鼻孔部分进行肉色填充,去除黑色鼻孔。 +''' + +import os,platform +import sys +import time +import yaml + +import cv2 +import dlib +import numpy as np +import mediapipe as mp +import logging +import shutil + + +def rotate_img_fill_bound(image, angle): + ''' + 仿射变换,旋转图片angle角度,缺失背景白色(0, 0, 0)填充 + :param image: 需要旋转的图片; + :param angle: 旋转角度; + :return: 输出旋转后的图片。 + ''' + if image is None: + return None + # 获取图像的尺寸,确定中心 + (h, w) = image.shape[:2] + (cX, cY) = (w // 2, h // 2) + # 获取旋转矩阵(应用角度的负数 顺时针旋转),然后抓取正弦和余弦 (即矩阵的旋转分量) + # -angle位置参数为角度参数负值表示顺时针旋转; 1.0位置参数scale是调整尺寸比例(图像缩放参数),建议0.75 + M = cv2.getRotationMatrix2D((cX, cY), -angle, 1.0) + cos = np.abs(M[0, 0]) + sin = np.abs(M[0, 1]) + # 计算图像的新边界尺寸 + nW = int((h * sin) + (w * cos)) + nH = int((h * cos) + (w * sin)) + # 调整旋转矩阵以考虑平移 + M[0, 2] += (nW / 2) - cX + M[1, 2] += (nH / 2) - cY + # 执行实际旋转并返回图像 + # borderValue 缺失背景填充色彩,此处为白色,默认是黑色,可自定义 + img_result = cv2.warpAffine(image, M, (nW, nH), borderValue=(0, 0, 0)) + + return img_result + + +def get_face_hog(image): + ''' + 从原始图像中获取人脸位置,并记录信息,没有检测到人脸,坐标使用 [0, 0, 0, 0] 替代。 + :param image: 需要识别到原始图片; + :return: 从原图抠取的人脸图片face_img,及其坐标信息[x1, y1, x2, y2]。 + ''' + hog_face_detector = dlib.get_frontal_face_detector() # 加载预训练的 HoG 人脸检测器 + # results:存在人脸:rectangles[[(x1, y1) (x2, y2)]],不存在人脸:rectangles[] + results = 
hog_face_detector(image, 0) # 对图片进行人脸检测 + # print('face_num:', len(results)) + if results is None: + return 0, 0, 0, 0, 0 + for bbox in results: + x1 = bbox.left() # 人脸左上角x坐标 + y1 = bbox.top() # 人脸左上角y坐标 + x2 = bbox.right() # 人脸右下角x坐标 + y2 = bbox.bottom() # 人脸右下角y坐标 + face_img = image[y1:y2, x1:x2] + + return face_img, x1, y1, x2, y2 + + +def is_face_detected(image): + ''' + 判断图片是否检测到人脸 + :param image: 被判断的图片 + :return: True/False + ''' + face_detection = mp.solutions.face_detection.FaceDetection(min_detection_confidence=0.5) # 创建FaceDetection对象 + image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + results = face_detection.process(image_rgb) # 调用process函数来检测人脸 + # 判断是否检测到了人脸 + if results.detections is None: + return False + else: + return True + + +def cv_show(name, img): + ''' + 展示图片 + :param name: 窗口名 + :param img: 展示的图片 + ''' + cv2.imshow(name, img) + cv2.waitKey(0) + cv2.destroyAllWindows() + + +def get_distance(p1, p2): + ''' + 计算两个像素点之间的距离 + :param p1: 坐标点1 + :param p2: 坐标点2 + :return: p1,p2 像素之间的距离 + ''' + distance = np.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2) + distance = round(distance, 2) + return distance + + +def compute_distance(rect1, rect2): + ''' + rect1, rect2 是鼻孔矩形列表(nose_list)的元素,元素格式:(x, y, w, h, ...) + :param rect1: (x1, y1, w1, h1, ...1) + :param rect2: (x2, y2, w2, h2, ...2) + :return: 矩形中心点之间的距离 + ''' + x1, y1, w1, h1 = rect1[:-1] + x2, y2, w2, h2 = rect2[:-1] + center_x1 = x1 + w1 / 2 + center_y1 = y1 + h1 / 2 + center_x2 = x2 + w2 / 2 + center_y2 = y2 + h2 / 2 + distance = get_distance((center_x1, center_y1), (center_x2, center_y2)) + return distance + + +def filter_rectangles(rectangles): + ''' + 鼻孔筛选条件,限制鼻孔位置,去除非鼻孔轮廓的干扰 + :param rectangles: 列表,元素格式:(x, y, w, h, ...) + :return: 新列表,元素格式:(x, y, w, h, ...) + ''' + filtered_rectangles = [] + for i in range(len(rectangles)): + for j in range(i+1, len(rectangles)): + rect1 = rectangles[i] + rect2 = rectangles[j] + distance = compute_distance(rect1, rect2) + max_width = max(rect1[2], rect2[2]) + x1, y1, w1, h1 = rect1[:-1] + x2, y2, w2, h2 = rect2[:-1] + if max_width < distance < 3 * max_width and 4 * max(w1, w2) > abs(x1 - x2) > w1 and 3 * max(h1, h2) > abs(y1 - y2) >= 0: + filtered_rectangles.append((rect1, rect2)) + else: + return None + return filtered_rectangles + + +def lock_nose(face_img): + ''' + 锁定人脸图片中的鼻孔位置:通过查找图片轮廓,筛选轮廓,找到鼻孔位置。 + :param face_img: 导入人脸图片 + :return:处理好的 face_img, 鼻孔的位置信息 nose_list + ''' + face_h2, face_w2, _ = face_img.shape + face_gray = cv2.cvtColor(face_img, cv2.COLOR_BGR2GRAY) + retval, thresh = cv2.threshold(face_gray, 0, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU) + contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + nose_list = [] + for cnt in contours: + area = cv2.contourArea(cnt) + # 1.面积筛选轮廓 + if 200 <= area <= 1600: + rect = cv2.boundingRect(cnt) + x, y, w, h = rect + # 2.宽高比筛选轮廓 + if 0.5 < w / h < 4: # 鼻孔大致为椭圆形,所以要判断长宽比 + # 3.加一个距离筛选,去除眼睛的轮廓; + face_center = (int(face_w2 / 2), int(face_h2 / 2)) + cnt_point = (int(x + w / 2), int(y + h / 2)) + cnt_distance = get_distance(face_center, cnt_point) + if cnt_distance < int(face_h2 / 6): # 距离筛选 + # 4.颜色筛选 ———— 鼻孔中心点像素值较暗 + pixel_center = face_img[int(y + h / 2), int(x + w / 2)] + nose_list.append((x, y, w, h, sum(pixel_center))) # 存储满足上述条件的鼻孔位置信息 + # print('初步筛选得到的鼻孔列表nose_list:', nose_list) + if len(nose_list) == 2: + x1, y1, w1, h1 = nose_list[0][:-1] + x2, y2, w2, h2 = nose_list[1][:-1] + max_width = max(w1, w2) + distance = compute_distance(nose_list[0], nose_list[1]) + if max_width < 
distance < 3 * max_width and 4 * max(w1, w2) > abs(x1 - x2) > w1 and 3 * max(h1, h2) > abs( + y1 - y2) >= 0: + for x, y, w, h, _ in nose_list: + if draw_nose_rectangle: + cv2.rectangle(face_img, (x, y), (x + w, y + h), (0, 255, 0), 2) + return face_img, nose_list + else: + # print('两鼻孔未满足筛选条件!') + return None + elif len(nose_list) > 2: + new_nose_list = filter_rectangles(nose_list) + # print('鼻孔数大于2,筛选后 new_nose_list:', new_nose_list) + if new_nose_list is None: + # print('鼻孔不满足筛选条件!') + logging.info('鼻孔不满足函数filter_rectangles筛选条件!') + return None + else: + for x, y, w, h, _ in new_nose_list: + if draw_nose_rectangle: + cv2.rectangle(face_img, (x, y), (x + w, y + h), (0, 255, 0), 2) + return face_img, new_nose_list + else: + # print('未检测到两个鼻孔,跳过...') + logging.info('未检测到两个鼻孔,跳过...') + for x, y, w, h, _ in nose_list: + if draw_nose_rectangle: + cv2.rectangle(face_img, (x, y), (x + w, y + h), (0, 255, 0), 2) + return None + + +def select_nose_contour(image, fill_pixel, iterations=0): + ''' + 挑选出鼻孔轮廓位置,如果鼻孔轮廓和鼻孔图片外轮廓连接到一起,需要分隔开。 + :param image: 待处理的鼻孔原图 + :param fill_pixel: 需要填充鼻孔的像素 + :param iterations: 膨胀次数 + :return: 处理完的鼻孔图片 + ''' + max_cnt = "" + gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) + retval, img_thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU) + kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3)) + # 对黑色区域来说是腐蚀,对白色区域来说是膨胀,闭运算 + image_erode = cv2.morphologyEx(img_thresh, cv2.MORPH_CLOSE, kernel, iterations=2) + # 选取黑色区域中面积最大的(需要除去图片整体的外轮廓) ———— 鼻孔 + contours, hierarchy = cv2.findContours(image_erode, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + sum_area = [] + for ccc in contours: + area1 = cv2.contourArea(ccc) + sum_area.append((ccc, area1)) # 存储轮廓数据和该轮廓面积 + areas_list = sorted(sum_area, key=lambda x: x[1], reverse=True) # 降序 + # 轮廓数目判断,并且轮廓面积必须达到一定面积,暂定大于图片总面积的 1/6 ! + if len(areas_list) > 1: + if areas_list[1][1] > (areas_list[0][1] / 6): + max_cnt = areas_list[1][0] + else: + return None + else: + # 注意:鼻孔颜色较浅,鼻孔外围颜色较深,那么两者二值化图像黑色块融合在一起了! 
+ # 此时,总轮廓数目为 1 ,鼻孔轮廓和外轮廓连接在一起,解决办法:将鼻孔二值图片外围一圈以3个像素为单位全部填充为白色 + height, width = gray.shape + # 构造一个新的大一圈的白色矩阵,并在中心部分复制二值图片。 + new_img = np.ones((height, width), dtype=np.uint8) * 255 + new_img[3:-3, 3:-3] = gray[3:-3, 3:-3] + # 使用 rectangle 函数填充矩阵外围为白色 + cv2.rectangle(new_img, (0, 0), (width + 5, height + 5), 255, thickness=3) + # 再重新获取鼻孔轮廓 + new_contours, new_hierarchy = cv2.findContours(new_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + new_sum_area = [] + for ccc2 in new_contours: + are1 = cv2.contourArea(ccc2) + new_sum_area.append((ccc2, are1)) + new_areas_list = sorted(new_sum_area, key=lambda x: x[1], reverse=True) + if len(new_areas_list) > 1: + # print('图片外轮廓和鼻孔轮廓链接到一起,处理中...') + logging.info('图片外轮廓和鼻孔轮廓链接到一起,处理中...') + if new_areas_list[1][1] > (new_areas_list[0][1] / 6): + max_cnt = new_areas_list[1][0] + else: + return None + hull_cnt = cv2.convexHull(max_cnt) + # cv2.drawContours(image, [max_cnt], -1, (0, 255, 0), 1) # 展示 + # cv2.polylines(image, [hull], True, (0, 0, 255), 1) + # cv2.imshow('image', image) + mask = np.zeros_like(img_thresh) + cv2.drawContours(mask, [hull_cnt], 0, 255, -1) # 使用白色(255)填充整个轮廓(-1)区域 + mask_dilate = cv2.dilate(mask, kernel, iterations=iterations) + indices = np.where(mask_dilate == 255) + # 替换指定像素值 + image[indices[0], indices[1]] = fill_pixel + + return image + + +def nose_color_fill(face_img, nose_list, pid): + ''' + 对获取到的左右鼻孔填充像素 + :param face_img: 未处理的人脸图片 + :param nose_list: 人脸图片左右鼻孔位置信息 + :param pid: 图片id,这里主要用于打印处理过程图片 + :return: face_img:鼻孔填充颜色后的人脸图片 + ''' + # 1.获取填充鼻孔的像素 ———— 取左右鼻孔外界矩形中心坐标,鼻尖坐标是两者连线的中间点,以该点像素作为填充像素 + pixel_nose_list = [] + for x, y, w, h, _ in nose_list: + pixel_nose_list.append((int(x + w / 2), int(y + h / 2))) + fill_pixel_coor = (int((pixel_nose_list[0][0] + pixel_nose_list[1][0]) / 2), + int((pixel_nose_list[0][1] + pixel_nose_list[1][1]) / 2)) # 鼻尖点坐标(x, y) + fill_pixel = face_img[fill_pixel_coor[1], fill_pixel_coor[0]] # 鼻尖像素值 + # 2.找到需要改变像素的鼻孔区域,扩大截取鼻孔矩形范围,包裹整个鼻孔 + # if nose_list[0][0] < nose_list[1][0]: # x1 < x2 + # 左鼻孔:nose_list[0] ====》 x1, y1, w1, h1, sum(pixel_center)1;face_img[(y1-2):(y1+h1+2), (x1-2):(x1+w1+2)] + # 右鼻孔:nose_list[1] ====》 x2, y2, w2, h2, sum(pixel_center)2;face_img[(y2-2):(y2+h2+2), (x2-2):(x2+w2+2)] + nose_list = sorted(nose_list, key=lambda z: z[0]) # 以 x 大小进行升序,x1 < x2 ,x1是左鼻孔,x2是右鼻孔 + # 鼻孔尺寸调整外扩矩形框 + l_add_w = int(np.ceil(nose_list[0][2] / 10)) # 向上取整 + l_add_h = int(np.ceil(nose_list[0][3] / 10)) + r_add_w = int(np.ceil(nose_list[1][2] / 10)) + r_add_h = int(np.ceil(nose_list[1][3] / 10)) + # 图片尺寸调整外扩矩形框 + img_h, img_w = face_img.shape[:2] + add_img_h = int(np.ceil(img_h / 100)) + add_img_w = int(np.ceil(img_w / 100)) + left_nose_img = face_img[(nose_list[0][1] - l_add_h - add_img_h):(nose_list[0][1] + nose_list[0][3] + l_add_h + add_img_h), + (nose_list[0][0] - l_add_w - add_img_w):(nose_list[0][0] + nose_list[0][2] + l_add_w + add_img_w)] + right_nose_img = face_img[(nose_list[1][1] - r_add_h - add_img_h):(nose_list[1][1] + nose_list[1][3] + r_add_h + add_img_h), + (nose_list[1][0] - r_add_w - add_img_w):(nose_list[1][0] + nose_list[1][2] + r_add_w + add_img_w)] + + # select_nose_contour函数里面的外边界轮廓和内边界轮廓分开 + left_nose_img = select_nose_contour(left_nose_img, fill_pixel, iterations=0) + right_nose_img = select_nose_contour(right_nose_img, fill_pixel, iterations=0) + if left_nose_img is None or right_nose_img is None: # 检测到两鼻孔,但是不满足筛选条件的情况 + return None + # 高斯模糊 + left_nose_gauss = cv2.GaussianBlur(left_nose_img, (7, 7), 0) + right_nose_gauss = cv2.GaussianBlur(right_nose_img, (7, 7), 0) 
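# (Illustrative note, not part of the original file) The two assignments just below write
# the blurred crops back into face_img using the same expanded-rectangle slices that were
# used to cut left_nose_img / right_nose_img out, so the patch shapes match. A generic,
# bounds-clamped form of this crop -> process -> paste-back pattern would be:
#     y0, y1 = max(y - pad, 0), min(y + h + pad, img.shape[0])
#     x0, x1 = max(x - pad, 0), min(x + w + pad, img.shape[1])
#     img[y0:y1, x0:x1] = cv2.GaussianBlur(img[y0:y1, x0:x1], (7, 7), 0)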
+ face_img[(nose_list[0][1] - l_add_h - add_img_h):(nose_list[0][1] + nose_list[0][3] + l_add_h + add_img_h), + (nose_list[0][0] - l_add_w - add_img_w):(nose_list[0][0] + nose_list[0][2] + l_add_w + add_img_w)] = left_nose_gauss + face_img[(nose_list[1][1] - r_add_h - add_img_h):(nose_list[1][1] + nose_list[1][3] + r_add_h + add_img_h), + (nose_list[1][0] - r_add_w - add_img_w):(nose_list[1][0] + nose_list[1][2] + r_add_w + add_img_w)] = right_nose_gauss + + return face_img + + +def main(pids): + ''' + 批量处理人脸图片。 + :param pids: 待处理图片的id + :return: + ''' + # 设置日志等级为最低(DEBUG),并保存到文件中 + logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', + filename=os.path.join(workdir, run_logging)) + for item in pids: + pid, order_id = item.split('_') + start_time = time.time() + print('正在处理图片 {} ...'.format(pid)) + logging.info('正在处理图片 {} ...'.format(pid)) + # 判断文件目录是否存在,不存在则创建 + # if not os.path.exists(workdir + f'/{pid}/'): + # os.makedirs(workdir + f'/{pid}/') + image_path = os.path.join(workdirImgPath, f'{pid}_{order_id}', f'{pid}Tex1.jpg') + copy_save_path = os.path.join(workdirImgPath,"copy_texture", f'{pid}_{order_id}', f'{pid}Tex1_old.jpg') + + #判断是否存在目录,不存在就创建 + if not os.path.exists( os.path.join(workdirImgPath,'copy_texture', f'{pid}_{order_id}')): + os.makedirs(os.path.join(workdirImgPath,'copy_texture', f'{pid}_{order_id}')) + + save_path = os.path.join(workdirImgPath, f'{pid}_{order_id}', f'{pid}Tex1.jpg') # 保存修改后图像 + shutil.copy(image_path, copy_save_path) # 备份原图,复制操作 + image = cv2.imread(image_path) + for img_angle in image_angle_list: + # save_face_path = os.path.join(workdir, save_face_prefix, f'{pid}_{img_angle}.jpg') + img_rotated = rotate_img_fill_bound(image, img_angle) + face_hog = get_face_hog(img_rotated) + if face_hog is None: + print('图片 {0} 旋转角度为{1}时,没有检测到人脸,跳过...'.format(pid, img_angle)) + logging.info('图片 {0} 旋转角度为{1}时,没有检测到人脸,跳过...'.format(pid, img_angle)) + else: + print('图片 {0} 旋转角度为{1}时,检测到人脸,处理中...'.format(pid, img_angle)) + logging.info('图片 {0} 旋转角度为{1}时,检测到人脸,处理中...'.format(pid, img_angle)) + face_img, x1, y1, x2, y2 = get_face_hog(img_rotated) # 注意:这里会存在错误检测的人脸! 
+ face_h, face_w, _ = face_img.shape + if face_h >= 300 and face_w >= 300: # 去除face_img 尺寸较小的图片 + judge = is_face_detected(face_img) # mediapipe人脸识别筛除非人脸部分 + if judge: + # print('mediapipe人脸识别成功!') + if lock_nose(face_img) is None: + print('鼻孔不满足筛选条件!') + continue + # print('lock_nose检测到的鼻孔满足筛选条件!') + draw_nose_face, nose_list = lock_nose(face_img) + face_img_result = nose_color_fill(face_img, nose_list, pid) + if face_img_result is None: + print('旋转角度为{1}时,两个鼻孔可以检测到,但是鼻孔颜色深浅存在问题,处理失败...') + continue + # 将处理完成的人脸图片还原到原图中去,并将角度还原为初始状态 + img_rotated[y1:y2, x1:x2] = face_img_result + result_img = rotate_img_fill_bound(img_rotated, -img_angle) + # 保存图片的质量是原图的 95% + # if not os.path.exists(os.path.join(workdir, save_face_prefix)): + # os.makedirs(os.path.join(workdir, save_face_prefix)) + # cv2.imwrite(save_face_path, face_img_result, [cv2.IMWRITE_JPEG_QUALITY, 95]) + # print('图片 {0} 旋转角度为{1}时,人脸已保存!'.format(pid, img_angle)) + # logging.info('图片 {0} 旋转角度为{1}时,人脸已保存!'.format(pid, img_angle)) + print('图片 {0} 旋转角度为{1}时,鼻孔处理成功!'.format(pid, img_angle)) + logging.info('图片 {0} 旋转角度为{1}时,鼻孔处理成功!'.format(pid, img_angle)) + image = result_img # 将旋转后的处理结果保存为新的图片,再次进行旋转处理 + continue + else: + # mediapipe算法识别为非人脸 + print('图片 {0} 旋转角度为{1}时,mediapipe算法判断为非人脸,跳过...'.format(pid, img_angle)) + logging.info('图片 {0} 旋转角度为{1}时,mediapipe算法判断为非人脸,跳过...'.format(pid, img_angle)) + nose_error = open(os.path.join(workdir, mediapipe_judge_fail), 'a') + nose_error.write( + f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} {pid} 人脸二级筛选失败,跳过...\n') + continue + else: + print('图片 {0} 旋转角度为{1}时,检测到人脸尺寸小于300*300,跳过...'.format(pid, img_angle)) + logging.info('图片 {0} 旋转角度为{1}时,检测到人脸尺寸小于300*300,跳过...'.format(pid, img_angle)) + nose_error = open(os.path.join(workdir, small_face_error), 'a') + nose_error.write( + f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} {pid} 检测到人脸尺寸小于300*300,跳过...\n') + continue + # 保存图片的质量是原图的 95% + if not os.path.exists(os.path.join(workdirImgPath, f'{pid}_{order_id}')): + os.makedirs(os.path.join(workdirImgPath, f'{pid}_{order_id}')) + cv2.imwrite(save_path, image, [cv2.IMWRITE_JPEG_QUALITY, 95]) + end_time = time.time() + solve_time = start_time - end_time + print('图片:{0} 已处理完成并保存,处理时间:{1}!'.format(pid, solve_time)) + logging.info('图片:{0} 已处理完成并保存,处理时间:{1}!'.format(pid, solve_time)) + + +if __name__ == '__main__': + with open("config/nose.yaml", 'r') as f: + Config = yaml.load(f, Loader=yaml.FullLoader) + + os_type = platform.system() + + workdir = Config['Nose_Config']['Select_system'][f'{os_type}_path']['workdir'] + workdirImgPath = Config['Nose_Config']['Select_system'][f'{os_type}_path']['workdirImgPath'] + pids_txt = Config['Nose_Config']['Select_system'][f'{os_type}_path']['pids_txt'] + + no_detect_nose_error = Config['Nose_Config']['Select_system'][f'{os_type}_path']['no_detect_nose_error'] + small_face_error = Config['Nose_Config']['Select_system'][f'{os_type}_path']['small_face_error'] + mediapipe_judge_fail = Config['Nose_Config']['Select_system'][f'{os_type}_path']['mediapipe_judge_fail'] + run_logging = Config['Nose_Config']['Select_system'][f'{os_type}_path']['run_logging'] + + image_angle_list = Config['Nose_Config']['Personal_parameter']['image_angle_list'] # 图片旋转角度0、90、180、270 + draw_nose_rectangle = Config['Nose_Config']['Personal_parameter']['draw_nose_rectangle'] # 是否需要画出鼻孔的矩形框 + + if len(sys.argv) == 2: + if sys.argv[1] == 'all': + # with open('datasets/pids_all.txt', 'r') as f: + with open(os.path.join(workdir, pids_txt), 'r') as f: + pids = f.read().split(',') + 
else: + pids = sys.argv[1].split(',') + main(pids) + else: + print('用法:python nose_processing.py ') + sys.exit(0) diff --git a/fonts/Helvetica.ttf b/fonts/Helvetica.ttf new file mode 100644 index 0000000..718f22d Binary files /dev/null and b/fonts/Helvetica.ttf differ diff --git a/foot_update_res.log b/foot_update_res.log new file mode 100644 index 0000000..e69de29 diff --git a/get_preview_image.py b/get_preview_image.py new file mode 100644 index 0000000..0e459b5 --- /dev/null +++ b/get_preview_image.py @@ -0,0 +1,136 @@ +import platform,sys,redis,time,requests,json,atexit +import os, sys, time, argparse, requests, json, re, oss2 +import bpy +import open3d as o3d +import shutil +import psutil +import trimesh +from PIL import Image +from pyvirtualdisplay import Display +# os.environ['DISPLAY'] = ':1' +AccessKeyId = 'LTAI5tSReWm8hz7dSYxxth8f' +AccessKeySecret = '8ywTDF9upPAtvgXtLKALY2iMYHIxdS' +Endpoint = 'oss-cn-shanghai.aliyuncs.com' +Bucket = 'suwa3d-website' +oss_client = oss2.Bucket(oss2.Auth(AccessKeyId, AccessKeySecret), Endpoint, Bucket) + +# 创建虚拟显示 +display = Display(visible=0, size=(500, 500)) +display.start() + +# pid = sys.argv[1] +# target_folder = '/data/datasets/complate/preview' +# source_folder = f'/data/datasets/complate/objs/{pid}' + +# 定义函数来递归遍历文件夹下的所有文件 +def list_files(directory): + for root, dirs, files in os.walk(directory): + for file in files: + yield os.path.join(root, file) + + +def trim_whitespace(image_path, output_path): + # 打开图片 + image = Image.open(image_path) + + # 获取图片的像素数据 + data = image.getdata() + + # 检测图片边界 + left = image.width + right = 0 + top = image.height + bottom = 0 + for x in range(image.width): + for y in range(image.height): + # 如果像素不是白色,则更新边界坐标 + if data[x + y * image.width] != (255, 255, 255, 255): + left = min(left, x) + right = max(right, x) + top = min(top, y) + bottom = max(bottom, y) + + # 裁剪图片 + image = image.crop((left, top, right + 1, bottom + 1)) + + # 保存裁剪后的图片 + image.save(output_path) + +def remove_background(image_path, output_path): + # 打开图片 + image = Image.open(image_path) + + # 将图片转换为带有透明通道的 RGBA 模式 + image = image.convert("RGBA") + + # 获取图片的像素数据 + data = image.getdata() + + # 新的像素数据列表 + new_data = [] + + # 将白色背景的像素转换为透明 + for item in data: + # 如果像素为白色,则设置为完全透明 + if item[:3] == (255, 255, 255): + new_data.append((255, 255, 255, 0)) + else: + new_data.append(item) + + # 更新图片的像素数据 + image.putdata(new_data) + + # 保存图片 + image.save(output_path, "PNG") + +#生成预览图 +def createImage(pid): + + + target_folder = '/data/datasets/complate/preview' + source_folder = f'/data/datasets/complate/objs/{pid}' + + for file_path in list_files(source_folder): + if ".obj" not in file_path: + continue + #print("文件路径:",file_path) + #判断文件是否存在 + if not os.path.exists(file_path): + continue + + + + # 加载OBJ文件 + mesh = trimesh.load_mesh(file_path) + + # 生成封面图 + image = mesh.scene().save_image(resolution=[400, 300]) + # 转换为Pillow的Image对象 + # pil_image = Image.frombytes('RGB', (image.width, image.height), image.data) + # # 保存封面图 + # pil_image.save(f'{target_folder}/{pid}_preview.png') + #保存封面图 + with open(f'{target_folder}/{pid}_preview.png', 'wb') as f: + f.write(image) + + time.sleep(2) + print("处理封面图片.....") + remove_background(f'{target_folder}/{pid}_preview.png',f'{target_folder}/{pid}_preview.png') + print("封面图片处理完成.....") + time.sleep(1) + + oss_client.put_object_from_file(f'print_ticket_view/{pid}_preview.png', f'{target_folder}/{pid}_preview.png') + + break + # 关闭虚拟显示 + #display.stop() +# if __name__ == "__main__": +# AccessKeyId = 
'LTAI5tSReWm8hz7dSYxxth8f' +# AccessKeySecret = '8ywTDF9upPAtvgXtLKALY2iMYHIxdS' +# Endpoint = 'oss-cn-shanghai.aliyuncs.com' +# Bucket = 'suwa3d-website' +# oss_client = oss2.Bucket(oss2.Auth(AccessKeyId, AccessKeySecret), Endpoint, Bucket) +# pid = sys.argv[1] +# target_folder = '/data/datasets/complate/preview' +# source_folder = f'/data/datasets/complate/objs/{pid}' +# createImage() \ No newline at end of file diff --git a/libs/common.py b/libs/common.py new file mode 100644 index 0000000..50992fc --- /dev/null +++ b/libs/common.py @@ -0,0 +1,432 @@ +import redis,sys,os,re,oss2,shutil,time,cv2 +import json +import requests +import platform +import numpy as np +import xml.etree.ElementTree as ET +from PIL import ImageGrab +if platform.system() == 'Windows': + #sys.path.append('e:\\libs\\') + sys.path.append('libs') +else: + sys.path.append('/home/acprint/code/libs/') +import config,libs + +#判断模型是需要高精模 或者是 需要photo3 参与 +def task_need_high_model_or_photo3(pid): + resHigh = task_need_high_model(pid) + resPhoto3 = task_need_photo3(pid) + if resHigh or resPhoto3: + return True + else: + return False + +#判断是否需要高精模 +def task_need_high_model(pid): + redis_conn = config.redis_local_common + #判断在redis中是否有高精模和 需要photo3 参与的 task + if redis_conn.sismember("calculateHighModel",pid): + return True + #判断是否需要高精模 + if redis_conn.sismember("calculateHighModel_no",pid): + return False + + + #判断是否需要高精模 + if libs.aliyun_face(pid): + #calulate_type = 'calculateHighModel' + redis_conn.sadd("calculateHighModel",pid) + return True + else: + redis_conn.sadd("calculateHighModel_no",pid) + return False + +#判断是否需要photo3参与建模 +def task_need_photo3(pid): + redis_conn = config.redis_local_common + #判断在redis中是否有高精模和 需要photo3 参与的 task + if redis_conn.sismember("photo3",pid): + return True + #判断是否需要photo3参与建模 + if redis_conn.sismember("photo3_no",pid): + return False + if os.path.exists(os.path.join(config.workdir, pid, 'photo3')): + redis_conn.sadd("photo3",pid) + return True + else: + redis_conn.sadd("photo3_no",pid) + return False + +#拷贝远程主机上的指定目录到本地指定目录 +def copy_remote_directory(remote_host, remote_path, local_path): + # 建立 SSH 连接 + ssh = paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect(remote_host, username='your_username', password='your_password') + + # 创建 SFTP 客户端 + sftp = ssh.open_sftp() + + # 获取远程目录下的所有文件/子目录 + file_list = sftp.listdir(remote_path) + + # 遍历远程目录中的每个文件/子目录 + for file_name in file_list: + remote_file_path = os.path.join(remote_path, file_name) + local_file_path = os.path.join(local_path, file_name) + + # 判断当前项是文件还是目录 + if sftp.stat(remote_file_path).st_isdir(): + # 如果是目录,递归调用函数进行拷贝 + os.makedirs(local_file_path, exist_ok=True) + copy_remote_directory(remote_host, remote_file_path, local_file_path) + else: + # 如果是文件,直接拷贝到指定目录 + sftp.get(remote_file_path, local_file_path) + + # 关闭 SFTP 客户端和 SSH 连接 + sftp.close() + ssh.close() + +#移除redis中的高精模和 需要photo3 参与的 task +# def remove_redis_high_model_or_photo3(pid): +# redis_conn = config.redis_local_common +# redis_conn.srem("calculateHighModel",pid) +# redis_conn.srem("photo3",pid) + + +# if __name__ == '__main__': +# redis_conn = config.redis_local_common +# print(redis_conn.sismember("photo3_no","1")) + +#读取rcbox文件进行修改指定的值 + +def change_rcbox_s(pid,new_value): + rcbox_path = os.path.join(config.workdir, pid, f"{pid}.rcbox") + old_value_pattern = r'(.*?)' + #读取文件内容 + with open(rcbox_path, 'r') as f: + content = f.read() + #使用正则表达式进行匹配 + match = re.search(old_value_pattern,content) + if match: + old_value = 
match.group(1) + if old_value == "": + return + #分割字符串 + arrStr = old_value.split(" ") + #重新拼接字符串 + strs = arrStr[0]+" "+arrStr[1]+" "+str(float(arrStr[2])+new_value) + new_content = re.sub(old_value_pattern,f'{strs}',content) + #重新写入进去 + with open(rcbox_path, 'w') as f: + f.write(new_content) + +def change_rcbox_center(pid,new_value): + rcbox_path = os.path.join(config.workdir, pid, f"{pid}.rcbox") + old_value_pattern = r'
(.*?)
' + #读取文件内容 + with open(rcbox_path, 'r') as f: + content = f.read() + #使用正则表达式进行匹配 + match = re.search(old_value_pattern,content) + if match: + old_value = match.group(1) + if old_value == "": + return + #分割字符串 + arrStr = old_value.split(" ") + #重新拼接字符串 + strs = arrStr[0]+" "+arrStr[1]+" "+str(new_value) + new_content = re.sub(old_value_pattern,f'
{strs}
',content) + #重新写入进去 + with open(rcbox_path, 'w') as f: + f.write(new_content) + +#修改rcproj文件,删除没有模型的component,保留最多model 的component +def changeRcprojFile(pid): + # 解析XML文件 + file_path = os.path.join(config.workdir, pid, f'{pid}.rcproj') + #判断文件是否存在 + if not os.path.exists(file_path): + return False + tree = ET.parse(file_path) + root = tree.getroot() + # 遍历所有的reconstructions节点 + for reconstruction in root.findall('reconstructions'): + for component in reconstruction.findall('component'): + if component.find('model') == None: + reconstruction.remove(component) + continue + # 获取所有包含model标签的component节点 + components_with_model = [component for component in reconstruction.findall('component') if component.find('model') is not None] + print(components_with_model) + # 如果包含model标签的component节点数量大于1,则按照model数量降序排序 + if len(components_with_model) > 1: + components_with_model.sort(key=lambda x: len(x.findall('model')), reverse=False) + + for i in range(len(components_with_model)-1): + reconstruction.remove(components_with_model[i]) + + # 保存修改后的XML文件 + tree.write(file_path) + return True +#修改 rcproj 文件中的 controlpoints 文件的引用 +def changeRcprojControlpointsFile(pid): + psid = libs.getPSid(pid) + # 解析XML文件 + file_path = os.path.join(config.workdir, str(pid), f'{pid}.rcproj') + #判断文件是否存在 + if not os.path.exists(file_path): + return False + #下载指定的psid的 points文件到本地 + flag = down_points_from_oss(pid,psid) + if flag == False: + return False + tree = ET.parse(file_path) + root = tree.getroot() + # 遍历所有的reconstructions节点 + for controlpoints in root.findall('controlpoints'): + #修改 controlpoints 标签内容里 双引号的内容 + controlpoints.set('fileName',f'controlpoints_{psid}.dat') + # 保存修改后的XML文件 + tree.write(file_path) + return True + + +def down_points_from_oss(pid,psid): + # 根据前缀获取文件列表 + prefix = f'points/{psid}/' + filelist = oss2.ObjectIteratorV2(config.oss_bucket, prefix=prefix) + flag = False + for file in filelist: + filename = file.key.split('/')[-1] + if filename.endswith('.dat'): + # print('正在下载:', file.key) + localfile = os.path.join(config.workdir,str(pid), filename) + config.oss_bucket.get_object_to_file(file.key, localfile) + flag = True + return flag + +#判断oss上是否存在指定的controlpoints文件 +def isExistControlPointsOss(pid): + return False + psid = libs.getPSid(pid) + filePath = f'points/{psid}/controlpoints_{psid}.dat' + #判断oss上是否存在 + if config.oss_bucket.object_exists(filePath): + return True + else: + return False + +#将本地的controlpoints文件上传到oss上 +def uploadControlPointsOss(pid): + psid = libs.getPSid(pid) + filePath = f'points/{psid}/controlpoints_{psid}.dat' + localfile = os.path.join(config.workdir,str(pid), f'{str(pid)}_wait/controlpoints0.dat') + #进行上传 + config.oss_bucket.put_object_from_file(filePath, localfile) + +#截屏保存 +def saveScreenImg(pid): + #判断pid 是否是数字 + if str(pid).isdigit() == False: + return + + if pid == 0 or pid == "": + return "pid 等于空" + #获取当前的日志 + if not os.path.exists(os.path.join(config.workdir,"screen", time.strftime("%y%m%d",time.localtime()))): + os.makedirs(os.path.join(config.workdir,"screen", time.strftime("%y%m%d",time.localtime()))) + screenshot = ImageGrab .grab() + screenshot.save(os.path.join(config.workdir,"screen", time.strftime("%y%m%d",time.localtime()))+"/"+str(pid)+".png") + #移动到e盘 + if not os.path.exists(os.path.join(config.sharedir,"screen", time.strftime("%y%m%d",time.localtime()))): + os.makedirs(os.path.join(config.sharedir,"screen", time.strftime("%y%m%d",time.localtime()))) + shutil.copy(os.path.join(config.workdir,"screen", 
+
+
+# Move a finished job folder into finished/ and prune old ones
+def removeFolder(pid):
+    # Create the finished folder if it does not exist yet
+    if not os.path.exists(os.path.join(config.workdir, 'finished')):
+        os.makedirs(os.path.join(config.workdir, 'finished'))
+    # Move the job folder into finished; if it already exists there, delete it first
+    if os.path.exists(os.path.join(config.workdir, 'finished', pid)):
+        shutil.rmtree(os.path.join(config.workdir, 'finished', pid), ignore_errors=True)
+    shutil.move(os.path.join(config.workdir, pid), os.path.join(config.workdir, 'finished'))
+    # Walk the finished folder and delete anything older than three days (259200 seconds)
+    for file in os.listdir(os.path.join(config.workdir, 'finished')):
+        if os.path.isdir(os.path.join(config.workdir, 'finished', file)):
+            file_time = os.path.getmtime(os.path.join(config.workdir, 'finished', file))
+            now_time = time.time()
+            if (now_time - file_time) > 259200:
+                shutil.rmtree(os.path.join(config.workdir, 'finished', file), ignore_errors=True)
+
+
+def find_last_x(image, slope_threshold=1000):
+    x = []
+    y = []
+    hist, bins = np.histogram(image, bins=256, range=[0, 256])
+
+    # Find the highest peak among bins 5-49
+    max_y = 0
+    max_i = 5
+    for i in range(5, 50):
+        if hist[i] > max_y:
+            max_y = hist[i]
+            max_i = i
+    print(f'Highest peak below bin 50: y={max_y}, at x={max_i}')
+
+    for i in range(2, max_i):
+        x.append(i)
+        y.append(hist[i])
+    slopes = [abs(y[i + 1] - y[i]) for i in range(len(x) - 1)]
+
+    # Collect the bins where the histogram slope exceeds the threshold and return the last one found
+    current_interval = []
+    max_interval = []
+    max_x = {}
+    for i, slope in enumerate(slopes):
+        current_interval.append(slope)
+        if slope >= slope_threshold:
+            if len(current_interval) > len(max_interval):
+                max_interval = current_interval.copy()
+                max_x[x[i]] = slope
+            current_interval = []
+
+    print(max_x)
+    last_x = list(max_x)[-1]
+    last_y = max_x[last_x]
+    return last_x, last_y
+
+def high_find_histogram_range(image, target_frequency):
+    '''
+    Scan the histogram downwards from bin 255 until a bin's count reaches target_frequency.
+    :param image: input image
+    :param target_frequency: frequency (y) threshold
+    :return: histogram bin (x) and the frequency (y) at that bin
+    '''
+    # Grey-level histogram
+    hist, bins = np.histogram(image, bins=256, range=[0, 256])
+    # Start from the brightest bin
+    interval = 255
+    frequency = hist[255]
+    while frequency < target_frequency:
+        # Step down one bin and read its count
+        interval -= 1
+        frequency = hist[interval]
+        # Stop once the count is close enough to the target (within 2000)
+        if target_frequency - 2000 <= frequency <= target_frequency + 2000:
+            break
+
+    return interval, frequency
+
+def ps_color_scale_adjustment(image, shadow=0, highlight=255, midtones=1):
+    '''
+    Simulate the Photoshop Levels adjustment; 0 <= shadow < highlight <= 255
+    :param image: input image
+    :param shadow: black point (0 - highlight)
+    :param highlight: white point (shadow - 255)
+    :param midtones: gamma / grey point (0.01 - 9.99)
+    :return: adjusted image
+    '''
+    if highlight > 255:
+        highlight = 255
+    if shadow < 0:
+        shadow = 0
+    if shadow >= highlight:
+        shadow = highlight - 2
+    if midtones > 9.99:
+        midtones = 9.99
+    if midtones < 0.01:
+        midtones = 0.01
+    image = np.array(image, dtype=np.float16)
+    # Spread between the white point and the black point
+    Diff = highlight - shadow
+    image = image - shadow
+    image[image < 0] = 0
+    image = (image / Diff) ** (1 / midtones) * 255
+    image[image > 255] = 255
+    image = np.array(image, dtype=np.uint8)
+
+    return image
+
+
+def remove_gray_and_sharpening(jpg_path):
+    # low_y_limit = 25000
+    high_y_limit = 13000
+    input_image = cv2.imread(jpg_path)
+    # low_x_thresh, low_y_frequency = low_find_histogram_range(input_image, low_y_limit)
+    low_x_thresh, low_y_frequency = find_last_x(input_image, 1000)
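+    # high_x_thresh is the white point: the first bin, scanning down from 255, whose count reaches high_y_limit; low_x_thresh is the black point from find_last_x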
high_x_thresh, high_y_frequency = high_find_histogram_range(input_image, high_y_limit) + print(f"{low_x_thresh} 区间, {low_y_frequency} 频次") + print(f"{high_x_thresh} 区间, {high_y_frequency} 频次") + high_output_image = ps_color_scale_adjustment(input_image, shadow=low_x_thresh, highlight=high_x_thresh, midtones=1) + cv2.imwrite(jpg_path, high_output_image, [cv2.IMWRITE_JPEG_QUALITY, 95]) # 保存图片的质量是原图的 95% + +#读取指定路径判断是否对齐成功 +def isAlignNums(strPid): + #拼接路径 + csvPath = os.path.join(config.workdir, strPid, strPid+"_align.csv") + print(csvPath) + #判断文件是否存在 + if os.path.exists(csvPath) == False: + return "rebuild" + #读取文件行数 + lines = "" + with open(csvPath, 'r') as f: + lines = f.readlines() + #获取长度 + lines = len(lines) - 1 + #获取pid对应的 photo1 和 photo2 的数量 + photo1Num = 0 + photo2Num = 0 + for file in os.listdir(os.path.join(config.workdir, strPid, 'photo1')): + if file.endswith('.jpg'): + photo1Num += 1 + + for file in os.listdir(os.path.join(config.workdir, strPid, 'photo2')): + if file.endswith('.jpg'): + photo2Num += 1 + #获取图片总数 + totalPhotos = photo1Num + photo2Num + #比对对齐数量 + if totalPhotos - lines >= 4: + return "rebuild" + + return True + + +#消息通知 +def notify(content): + + if content == "": + return "content 不能为空" + + for user_agent_id in config.notify_user_Ids: + data = { + 'userId': user_agent_id, + 'message': content, + } + headers = {'Content-Type': 'application/json'} + message_send_url = "https://mp.api.suwa3d.com/api/qyNotify/sendMessage?userId="+user_agent_id+"&message="+content + response = requests.post(message_send_url, data=json.dumps(data), headers=headers) \ No newline at end of file diff --git a/libs/computerRecboxCenterByPoint.py b/libs/computerRecboxCenterByPoint.py new file mode 100644 index 0000000..92065b2 --- /dev/null +++ b/libs/computerRecboxCenterByPoint.py @@ -0,0 +1,86 @@ +import open3d as o3d +import numpy as np +import matplotlib.pyplot as plt +import sys +import statistics +import math +import copy +import os,time +from scipy.stats import mode +from scipy.spatial import KDTree +from scipy.spatial import cKDTree +import matplotlib.font_manager as fm + +#处理点云文件保存脚踝一下的部分点云 +def dealPointData(pidPath,pidNewPath): + # 读取 PLY 文件 + point_cloud = o3d.io.read_point_cloud(pidPath) + # 移除离散点 + cl, ind = point_cloud.remove_statistical_outlier(nb_neighbors=20, std_ratio=0.06) + point_cloud = point_cloud.select_by_index(ind) + # 保存处理后的点云 + o3d.io.write_point_cloud(pidNewPath, point_cloud) + data = np.loadtxt(pidNewPath) + # 提取xyz坐标 + x = data[:, 0] + y = data[:, 1] + z = data[:, 2] + # 创建一个布尔索引,筛选出在x-y平面内,半径在0.5范围内的点 (z >= 0.00) & + mask = (z >=0) & (z <= 0.1) & (x**2 + y**2 <= 0.7**2) + # 根据索引,保留符合条件的点 + filtered_data = data[mask] + # 将保留的点云数据保存到新的文件 + np.savetxt(pidNewPath, filtered_data, delimiter=' ') + + +#计算点云的数据中,脚底和地板的切线位置 +def boxCenter(pid): + pidPath = "D:\\xyz\\" + str(pid) + "_point.xyz" + pidNewPath = "D:\\xyz\\" + str(pid) + "_new.xyz" + dealPointData(pidPath,pidNewPath) + + pcd = o3d.io.read_point_cloud(pidNewPath) + allPoints = np.asarray(pcd.points) + if len(allPoints) == 0: + print("点云为空,无法计算均值。") + return 0 + + # 使用RANSAC算法提取平面 + plane_model, inliers = pcd.segment_plane(distance_threshold=0.00670, ransac_n=3, num_iterations=1000) + inlier_cloud = pcd.select_by_index(inliers) + # 获取被染成红色的点的坐标数组 + red_points = inlier_cloud.points + red_points_np = np.asarray(red_points) + # 获取 red_points_np 的 z 值数组 + z_red_values = red_points_np[:, 2] + # 计算中位数 + medianRed = np.median(z_red_values) + # 计算中位数向上的平均数 + meanRedUp = np.mean(z_red_values[z_red_values > 
medianRed]) + #计算中位数向下的平均数 + #meanRedDown = np.mean(z_red_values[z_red_values < medianRed]) + + # Exclude ground points from allPoints + foot_points_np = np.asarray([point for point in allPoints if point not in red_points_np]) + if len(foot_points_np) == 0 : + print("脚部点云为空。") + return 0 + #过滤掉地板,计算出脚底的点云数据 + points_np_foot = foot_points_np[(foot_points_np[:, 2] < 0.046) & (foot_points_np[:, 2] > meanRedUp)] + # 计算平均值 + mean = np.mean(points_np_foot[:, 2]) + + # 按照 Z 值进行排序 + sorted_points = points_np_foot[np.argsort(points_np_foot[:, 2])] + # 获取 Z 值最低的前十个点 + if len(sorted_points) == 0: + print("脚底板最低的点云为空") + return 0 + try: + lowest_points = sorted_points[10:20] + except Exception as e: + print("获取脚底板最低的倒数第十到20的点出现异常错误") + return 0 + # 计算平均值 + meanLowEst = np.mean(lowest_points[:, 2]) + 1.05 + return meanLowEst \ No newline at end of file diff --git a/libs/config.py b/libs/config.py new file mode 100644 index 0000000..63bc97f --- /dev/null +++ b/libs/config.py @@ -0,0 +1,140 @@ +import oss2, redis, platform + +baidu_api = { + 'face': { + 'app_id': '26878271', + 'api_key': '01CQzxLCpGrLjGe2ClKTC8hx', + 'secret_key': '56WHgdlEvGG4iA9KAEn51naiXy31ybKa', + } +} + +ali_oss = { + 'access_key_id': 'LTAI5tSReWm8hz7dSYxxth8f', + 'access_key_secret': '8ywTDF9upPAtvgXtLKALY2iMYHIxdS', + 'facebody_endpoint': 'facebody.cn-shanghai.aliyuncs.com', + 'endpoint': 'oss-cn-shanghai.aliyuncs.com', + 'bucket_name': 'suwa3d-securedata', +} +oss_bucket = oss2.Bucket(oss2.Auth(ali_oss['access_key_id'], ali_oss['access_key_secret']), ali_oss['endpoint'], ali_oss['bucket_name']) + +redis_remote = redis.Redis(host='106.14.158.208', password='kcV2000', port=6379, db=6) +redis_local = redis.Redis(host='172.16.20.13', password='ph2008', port=6379, db=0) +redis_local_common = redis.Redis(host='172.16.20.13', password='ph2008', port=6379, db=1) +mysql_local = { + "host": "172.16.20.13", + "port": 3306, + "user": "pi", + "password": "ph2008", + "db": "suwa3d", + "charset": "utf8mb4" +} + +if platform.system() == 'Windows': + workdir = 'D:\\' + sharedir = 'E:\\' + rcbin = '"C:\\Program Files\\Capturing Reality\\RealityCapture\\RealityCapture.exe"' +else: + workdir = '/data/datasets/' + +urls = { + 'update_status_modeling_url': 'https://mp.api.suwa3d.com/api/customerP3dLog/toModeling', + 'update_status_modelsuccess_url': 'https://repair.api.suwa3d.com/api/modelRepairOrder/toModelMakeSucceed', + 'update_status_modelfailed_url': 'https://mp.api.suwa3d.com/api/customerP3dLog/toModelMakeFailed', + 'get_psid_url': 'https://mp.api.suwa3d.com/api/customerP3dLog/photoStudio', + 'get_printinfo_url': 'https://mp.api.suwa3d.com/api/customerP3dLog/printInfo', + 'update_status_printstatus_url': 'https://mp.api.suwa3d.com/api/customerP3dLog/updateBuildPrintModelStatus', + 'get_ps_adjust_photo_para_url': 'https://mp.api.suwa3d.com/api/equipment/configForColor', + 'get_ps_type_url' : 'https://mp.api.suwa3d.com/api/takephotoOrder/photoStudioInfo', + 'get_printsize_url' : 'https://mp.api.suwa3d.com/api/printOrder/info', + 'upload_model_info_url' : 'https://mp.api.suwa3d.com/api/physical/add', +} + +r = { + "setTextureTrue" : "-selectAllImages -enableTexturingAndColoring true", + "setTextureFalse" : "-selectAllImages -enableTexturingAndColoring false", +} + +r1 = { + "init" : "-disableOnlineCommunication -set \"sfmEnableCameraPrior=False\" -set \"sfmMaxFeaturesPerMpx=20000\" -set \"sfmMaxFeaturesPerImage=200000\" -set \"sfmImagesOverlap=High\" -set \"sfmMaxFeatureReprojectionError=1\"", +} + +r2 = { + "init" : "-disableOnlineCommunication 
-setProjectCoordinateSystem Local:1 -setOutputCoordinateSystem epsg:4326 -set \"sfmEnableCameraPrior=False\" -set \"sfmMaxFeaturesPerMpx=20000\" -set \"sfmMaxFeaturesPerImage=200000\" -set \"sfmImagesOverlap=High\" -set \"sfmMaxFeatureReprojectionError=1\"", + "setRegion" : "-setReconstructionRegionOnCPs 36h11:001 36h11:002 36h11:003 2.1 -moveReconstructionRegion 0 0 -2.1 -rotateReconstructionRegion 180 0 180 -setGroundPlaneFromReconstructionRegion -scaleReconstructionRegion 1.8 1.6 2.1 absolute center -moveReconstructionRegion 0 0 0.0025" +} + +#修改为走数据库配置 +# 影棚地贴版本定义 +# floor_sticker_distances = { +# # 影棚地贴版本1:老圆形影棚,二维码排序1、2、3、4、1,间距1米 +# "v1" : "36h11:001 36h11:002 1;36h11:002 36h11:003 1;36h11:003 36h11:004 1;36h11:004 36h11:001 1", +# # 影棚地贴版本2:新方形影棚,二维码排序1、2、4、3、1,间距1米 default +# "v2" : "36h11:001 36h11:002 1;36h11:002 36h11:004 1;36h11:004 36h11:003 1;36h11:003 36h11:001 1", +# # 影棚地贴版本3:新圆形影棚,二维码排序1、2、4、3、1,间距1.5米 +# "v3" : "36h11:001 36h11:002 1.5;36h11:002 36h11:004 1.5;36h11:004 36h11:003 1.5;36h11:003 36h11:001 1.5", +# # 影棚地贴版本4:新方形影棚,二维码排序5、6,间距0.21米 +# "v4" : "36h11:005 36h11:006 0.21", +# # 影棚地贴版本5:新方形影棚,二维码排序7、8,间距0.21米 +# "v5" : "36h11:007 36h11:008 0.21" +# } + +#修改为走数据库配置 +# 影棚与地贴版本配置关系 +# ps_floor_sticker = { +# "default" : floor_sticker_distances['v5'], +# "29" : floor_sticker_distances['v1'], +# "44" : floor_sticker_distances['v2'], +# "54" : floor_sticker_distances['v2'], +# "77" : floor_sticker_distances['v2'], +# "79" : floor_sticker_distances['v2'], +# "80" : floor_sticker_distances['v2'], +# "85" : floor_sticker_distances['v4'], +# } + +# 需要加入新建模系统的影棚 +#new_make_psids = ['1', '17', '29', '44', '54', '55', '63', '65', '77', '79', '80', '85', '86'] + +#企业微信通知人员 +notify_user_Ids = ["DongZhangXi","YouShui"] + +#任务运行超时时间设定 ,单位秒 +task_run_timeout = { + "R11":{ + "all":60*40, + "step1":60*4, + "step2":60*12, + "step3":60*12, + }, + "R12":{ + "all":60*40, + "step1":60*4, + "step2":60*12, + "step3":60*12, + }, + "R13":{ + "step2":60*12, + "step3":60*12, + }, + "R14":{ + "step2":60*12, + "step3":60*12, + }, + "R15":{ + "step2":60*12, + "step3":60*12, + }, + "R16":{ + "step2":60*12, + "step3":60*12, + }, + "R17":{ + "step2":60*12, + "step3":60*12, + }, +} + +high_host = ["R11","R12"] +low_host = ["R13","R14","R15","R16","R17"] + +#不参与贴图的照片,使用的时候还需要增加指令 python main_step1.py 123 no_texture +noTextureColorPics = ["13", "23", "31", "52", "72", "82", "142", "173"] #["134","135","136","142","143","14","15","16","24","25","26","44","45"] diff --git a/libs/foot_mark_seam.py b/libs/foot_mark_seam.py new file mode 100644 index 0000000..218e48f --- /dev/null +++ b/libs/foot_mark_seam.py @@ -0,0 +1,149 @@ +import bpy +import bmesh + + +def active_object(obj): + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + + +def get_obj_max_foot(workdir,filename,filename_tex): + # 1.模型导入和初始化 + # 删除当前场景中的所有对象: + # use_global=False表示只删除当前场景中的对象,而不会影响到其他场景中的对象;confirm=False表示删除时不需要确认。 + bpy.ops.object.delete(use_global=False, confirm=False) + bpy.ops.import_scene.obj(filepath=filename) # 导入指定路径的 OBJ 格式模型文件 + + bpy.context.scene.unit_settings.scale_length = 0.001 # 将场景的长度单位缩放为0.001,相当于将长度单位从默认的米缩小为毫米 + bpy.context.scene.unit_settings.length_unit = 'CENTIMETERS' # 将场景的长度单位设置为厘米 + bpy.context.scene.unit_settings.mass_unit = 'GRAMS' # 将场景的质量单位设置为克 + + obj = bpy.context.selected_objects[0] # 获取了当前选中的对象列表,然后通过 [0] 取得列表中的第一个对象 + bpy.context.view_layer.objects.active = obj # 将变量 obj 设置为当前活动对象 + obj.select_set(True) # 将变量 obj 的选择状态设置为 True,表示选中该对象 + pid = obj.name # 获取该对象的名字 + + # 
对选定的对象进行对齐操作 + bpy.ops.object.align(align_mode='OPT_1', relative_to='OPT_1', align_axis={'Z'}) + # 设置选中对象的原点,参数1:将原点设置为对象的质心,参数2:使用对象的几何中心作为参考 + bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN') + # 将选中对象的位置坐标分别设置为 (0, 0),即将对象移动到世界坐标系的原点位置 + obj.location[0] = 0 + obj.location[1] = 0 + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) # 将选中对象的位置、旋转和缩放应用到对象的数据中 + + # 2.选择要复制的对象 + obj_duplicate = obj.copy() + obj_duplicate.data = obj.data.copy() + bpy.context.collection.objects.link(obj_duplicate) + # obj_duplicate.location.x += 5.0 + bpy.ops.object.select_all(action='DESELECT') # 取消选中全部对象 + + # 3.处理复制的对象的脚底缝合边 + # 选中复制对象 + bpy.context.view_layer.objects.active = obj_duplicate + obj_duplicate.select_set(True) + + selected_obj = bpy.context.active_object # 获取当前选中的对象 + bpy.context.view_layer.objects.active = selected_obj # 将对象转换到编辑模式 + + # 切换到3D视图编辑模式 + bpy.context.area.type = 'VIEW_3D' + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') # 选择所有的边 + + # 切换到UV编辑器 + bpy.context.area.type = 'IMAGE_EDITOR' + pid_img = f"{pid}Tex1.jpg" + bpy.ops.image.open(filepath=filename_tex, directory=workdir, + files=[{"name": pid_img, "name": pid_img}], relative_path=True, + show_multiview=False) + bpy.context.area.ui_type = 'UV' + bpy.ops.uv.select_all(action='SELECT') # 选择所有UV贴图顶点 + # 标记所有沿孤岛的边为缝合边 + bpy.ops.uv.seams_from_islands() + bpy.context.area.type = 'VIEW_3D' + bpy.ops.object.mode_set(mode='OBJECT') + + # 获取世界坐标系下z轴接近0的顶点的索引 + z_zero_vertex_indices = [] + for i, vertex in enumerate(selected_obj.data.vertices): + world_vertex = selected_obj.matrix_world @ vertex.co + if abs(world_vertex.z) < 0.2: + z_zero_vertex_indices.append(i) + # 将对象转换回对象模式 + bpy.ops.object.mode_set(mode='OBJECT') + # 创建一个新的顶点组,并将z_zero_vertices中的顶点添加到该顶点组中 + vg = selected_obj.vertex_groups.new(name="z_zero_vertices") + for index in z_zero_vertex_indices: + vg.add([index], 1.0, 'REPLACE') + # 将选中的顶点设置为活动顶点 + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='DESELECT') + bpy.ops.object.vertex_group_select() # 选中待处理的顶点 + bpy.ops.mesh.mark_seam(clear=True) # 取消所选区域内的缝合边 + bpy.ops.mesh.region_to_loop() # 选择选定面周围的边界边!!! + bpy.ops.mesh.select_mode(type="EDGE") # 转换为线模式 + bpy.ops.mesh.mark_seam(clear=False) # 标记所选的线为缝合边 + + # 选中脚底顶点组 + bpy.ops.uv.select_all(action='DESELECT') + bpy.ops.object.vertex_group_set_active(group='z_zero_vertices') # 设置活动顶点组 + bpy.ops.object.vertex_group_select() # 选择分配给活动顶点组的所有顶点 + bpy.ops.uv.unwrap() + + # 处理贴图脚底部分孤岛,其他孤岛保持不变 + # (1)反选模型顶点,方便贴图固定不需要处理的区域 + bpy.ops.mesh.select_all(action='INVERT') + bpy.context.area.type = 'IMAGE_EDITOR' # 切换到贴图模式 + bpy.context.area.ui_type = 'UV' + bpy.ops.uv.pin(clear=False) + bpy.ops.object.vertex_group_set_active(group='z_zero_vertices') # 设置活动顶点组 + bpy.ops.object.vertex_group_select() # 选择分配给活动顶点组的所有顶点 + # (2)脚底部位UV展开,平均孤岛比例,重新排列孤岛 + bpy.ops.uv.select_all(action='SELECT') + bpy.ops.uv.average_islands_scale() + bpy.ops.uv.pack_islands(margin=0.001) + + bpy.context.area.type = 'VIEW_3D' + bpy.ops.object.mode_set(mode='OBJECT') + + # 4. 
烘焙模式,参数设置 + bpy.ops.object.select_all(action='DESELECT') # 取消选中全部对象 + # # 选中原始对象 + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + # 选中复制对象 + bpy.context.view_layer.objects.active = obj_duplicate + obj_duplicate.select_set(True) + bpy.context.scene.render.engine = 'CYCLES' + bpy.context.scene.cycles.device = 'GPU' + bpy.context.scene.cycles.preview_samples = 1 + bpy.context.scene.cycles.samples = 1 + bpy.context.scene.cycles.bake_type = 'DIFFUSE' + bpy.context.scene.render.bake.use_pass_direct = False + bpy.context.scene.render.bake.use_pass_indirect = False + bpy.context.scene.render.bake.use_selected_to_active = True + bpy.context.scene.render.bake.cage_extrusion = 0.01 + bpy.ops.object.bake(type='DIFFUSE') # 开始 Bake + + # 5. 导出模型和贴图 + bpy.ops.object.select_all(action='DESELECT') # 取消选中全部对象 + # 选中复制对象 + bpy.context.view_layer.objects.active = obj_duplicate + obj_duplicate.select_set(True) + bpy.ops.wm.obj_export(filepath=filename, export_selected_objects=True) + bpy.context.area.type = 'IMAGE_EDITOR' # 切换到 + bpy.ops.image.save_as(filepath=filename_tex) + bpy.context.area.type = 'TEXT_EDITOR' # 切换到文本编辑器 + + +if __name__ == '__main__': + workdir = 'E:\\117080\\print_model' + # filename = f'{workdir}\\117080_12cm_x1.obj' + filename = f'{workdir}\\117080.obj' + filename_tex = f'{workdir}\\117080Tex1.jpg' + save_obj = f"{workdir}\\bake\\117080Tex1.obj" + save_tex = f"{workdir}\\bake\\117080Tex1.jpg" + + get_obj_max_foot() \ No newline at end of file diff --git a/libs/install.txt b/libs/install.txt new file mode 100644 index 0000000..0dc7a86 --- /dev/null +++ b/libs/install.txt @@ -0,0 +1,13 @@ +pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple +pip config set install.trusted-host pypi.tuna.tsinghua.edu.cn + +python -m pip install --upgrade pip +pip install oss2 redis MySQLdb pillow numpy opencv-python bpy tqdm pyautogui psutil pywin32 pymysql + + +config +set bin="C:\Program Files\Capturing Reality\RealityCapture\RealityCapture.exe" +%bin% -disableOnlineCommunication -setInstanceName %pid% +%bin% -disableOnlineCommunication -delegateTo %pid% +%bin% -set "appCacheLocation=ProjectFolder" + diff --git a/libs/libs.py b/libs/libs.py new file mode 100644 index 0000000..70f12a6 --- /dev/null +++ b/libs/libs.py @@ -0,0 +1,309 @@ +import os, time, json, requests, shutil, oss2, psutil +from tqdm import tqdm +from PIL import Image, ImageEnhance +import config,libs_db,common +import threading +from concurrent.futures import ThreadPoolExecutor + +def find_blender_bin_path(): + base_path = 'C:\\Program Files\\Blender Foundation\\' + if os.path.exists(base_path): + for dir in os.listdir(base_path): + if dir.startswith('Blender'): + blender_bin_path = base_path + dir + '\\blender.exe' + return f'"{blender_bin_path}"' + else: + print('未找到blender安装目录') + exit(1) + +def resize_photos(photo_path, ratio=0.5): + for filename in os.listdir(photo_path): + if filename.endswith('.jpg'): + img = Image.open(os.path.join(photo_path, filename)) + img = rotate_image(img) + w, h = img.size + img = img.resize((int(w * ratio), int(h * ratio))) + img.save(os.path.join(photo_path, filename)) + +def rotate_image(image): + # 检查图像的EXIF数据是否包含方向信息 + try: + exif = image._getexif() + orientation = exif.get(0x0112) + except: + orientation = None + + # 根据方向信息旋转图像 + if orientation == 3: + image = image.rotate(180, expand=True) + elif orientation == 6: + image = image.rotate(270, expand=True) + elif orientation == 8: + image = image.rotate(90, expand=True) + return image + +def 
get_ps_adjust_photo_para(psid): + res = requests.get(config.urls['get_ps_adjust_photo_para_url'], params={'id': psid}) + print(res.json()) + paras = res.json()['data'] + brightness_factor, saturation_factor, temperature_factor = float(paras['brightness']), float(paras['saturation']), float(paras['colorTemperature']) + return brightness_factor, saturation_factor, temperature_factor + +def adjust_photos(workdir, pid): + def adjust_brightness(image, brightness_factor): + if brightness_factor == 1 or brightness_factor == 0 : + return image + enhancer = ImageEnhance.Brightness(image) + adjusted_image = enhancer.enhance(brightness_factor) + return adjusted_image + + def adjust_saturation(image, saturation_factor): + if saturation_factor == 1: + return image + enhancer = ImageEnhance.Color(image) + adjusted_image = enhancer.enhance(saturation_factor) + return adjusted_image + + def adjust_temperature(image, temperature_factor): + if temperature_factor == 1: + return image + r, g, b = image.split() + r = r.point(lambda i: i * temperature_factor) + adjusted_image = Image.merge("RGB", (r, g, b)) + return adjusted_image + if not os.path.exists(os.path.join(workdir, pid, 'photo2')): + print(f"Directory {os.path.join(workdir, pid, 'photo2')} does not exist") + return False + + psid = getPSid(pid) + brightness_factor, saturation_factor, temperature_factor = get_ps_adjust_photo_para(psid) + + if (brightness_factor == 1 and saturation_factor == 1 and temperature_factor == 1): + print("No need to adjust") + return False + if os.path.exists(os.path.join(workdir, pid, 'photo3')): + print(f'{os.path.join(workdir, pid, "photo3")}目录已存在,跳过') + return + os.makedirs(os.path.join(workdir, pid, 'photo3'), exist_ok=True) + + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} 开始调整图片曝光...') + start_time = time.time() + for filename in os.listdir(os.path.join(workdir, pid, 'photo2')): + if filename.endswith(".jpg"): + # print(f"Adjusting {filename}:brightness={brightness_factor}, saturation={saturation_factor}, temperature={temperature_factor}") + image = Image.open(os.path.join(workdir, pid, 'photo2', filename)) + image = rotate_image(image) + + brightened_image = adjust_brightness(image, brightness_factor) + saturated_image = adjust_saturation(brightened_image, saturation_factor) + adjusted_image = adjust_temperature(saturated_image, temperature_factor) + + adjusted_image.save(os.path.join(workdir, pid, 'photo3', filename)) + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} 图片曝光调整完成,共费时{diff_time(start_time)}') + return True + +def getPSid(pid): + res = requests.get(config.urls['get_psid_url'], params={'pid': pid}) + print('get_psid_url:', res.url) + print('res:', res.text) + res = json.loads(res.text) + return str(res['data']) + +def getHeadCount(pid): + res = requests.get(config.urls['get_printinfo_url'], params={'id': pid}) + print('get_printinfo_url:', res.url) + print('res:', res.text) + if res.status_code != 200: + print('获取人数失败,程序退出') + exit(1) + res = json.loads(res.text) + return res['data']['headcount'] + +def get_ps_type(pid): + # return 1:圆形影棚 2:方形影棚 + res = requests.get(config.urls['get_ps_type_url'], params={'pid': pid}) + return res.json()['data']['type'] + +def find_valid_camera_on_oss(pid): + if get_ps_type(pid) == 1: + print('当前拍照影棚为:圆形影棚') + cameras = (103, 93, 113) + else: + print('当前拍照影棚为:方形影棚') + cameras = (74, 64, 84) + find_camera = 0 + for camera in cameras: + objectkey = f'photos/{pid}/photo2/{camera}_8.jpg' + find = config.oss_bucket.object_exists(objectkey) + if 
find: + find_camera = camera + break + + print('找到有效正脸相机:', find_camera) + if find_camera == 0: + print('{cameras}没有找到照片,程序退出') + exit(1) + return find_camera + +def aliyun_face(pid): + high = False + style = 'imm/detectface' + camera = find_valid_camera_on_oss(pid) + objectkey = f'photos/{pid}/photo2/{camera}_8.jpg' + try: + res = config.oss_bucket.get_object(objectkey, process=style) + except oss2.exceptions.NoSuchKey: + print('没有找到文件:', objectkey) + return high + res = json.loads(res.read()) + if res['success']: + if res['Faces'] is None: + print('no face') + return None + else: + print('faces num:', len(res['Faces'])) + for face in res['Faces']: + print('-' * 20) + print('face_id:', face['FaceId']) + print('gender:', face['Gender']) + print('age:', face['Age']) + + if face['Gender'] == 'FEMALE' and face['Age'] < 22: high = True + if face['Gender'] == 'MALE' and face['Age'] < 15: high = True + else: + print('face detect failed...') + return high + +def down_obj_from_oss(workdir, pid, action): + if os.path.exists(os.path.join(workdir, action, pid)): + print(f'目录{os.path.join(workdir, action, pid)}已存在,跳过') + return + else: + os.makedirs(os.path.join(workdir, action, pid)) + + # 根据前缀获取文件列表 + prefix = f'objs/{action}/{pid}/' + filelist = oss2.ObjectIteratorV2(config.oss_bucket, prefix=prefix) + print('正在下载:', prefix) + obj_filename = "" + for file in filelist: + filename = file.key.split('/')[-1] + if filename.endswith('.obj'): + obj_filename = filename + # print('正在下载:', file.key) + localfile = os.path.join(workdir, action, pid, filename) + config.oss_bucket.get_object_to_file(file.key, localfile) + + return obj_filename + +def set_photos_join_type(workdir, pid, photoN, mesh = '0', texture='0'): + photoN_path = os.path.join(workdir, pid, photoN) + for xmp in os.listdir(photoN_path): + if xmp.endswith('.xmp'): + xmp_path = os.path.join(photoN_path, xmp) + with open(xmp_path, 'r') as f: + lines = f.readlines() + lines = [line.replace('xcr:InMeshing="0"', f'xcr:InMeshing="{mesh}"') for line in lines] + lines = [line.replace('xcr:InMeshing="1"', f'xcr:InMeshing="{mesh}"') for line in lines] + lines = [line.replace('xcr:InTexturing="0"', f'xcr:InTexturing="{texture}"') for line in lines] + lines = [line.replace('xcr:InTexturing="1"', f'xcr:InTexturing="{texture}"') for line in lines] + with open(xmp_path, 'w') as f: + f.writelines(lines) + +def set_photo_join_type(workdir, pid, photoN, camera_id, mesh = '0', texture='0'): + if photoN == 'photo1': + filename = os.path.join(workdir, pid, photoN, f'{camera_id}_1.xmp') + else: + filename = os.path.join(workdir, pid, photoN, f'{camera_id}_8.xmp') + with open(filename, 'r') as f: + lines = f.readlines() + lines = [line.replace('xcr:InMeshing="0"', f'xcr:InMeshing="{mesh}"') for line in lines] + lines = [line.replace('xcr:InMeshing="1"', f'xcr:InMeshing="{mesh}"') for line in lines] + lines = [line.replace('xcr:InTexturing="0"', f'xcr:InTexturing="{texture}"') for line in lines] + lines = [line.replace('xcr:InTexturing="1"', f'xcr:InTexturing="{texture}"') for line in lines] + with open(filename, 'w') as f: + f.writelines(lines) + +def down_from_oss(oss_client, workdir, pid, per=100,photoPath=""): + start_time = time.time() + path = os.path.join(workdir, pid) + if os.path.exists(path): + print(f"Directory {path} already exists, skip") + return + os.makedirs(os.path.join(path, 'photo1')) + os.makedirs(os.path.join(path, 'photo2')) + + psid = getPSid(pid) + # 根据前缀获取文件列表 + prefix = f'photos/{pid}/' + filelist = oss2.ObjectIteratorV2(oss_client, 
prefix=prefix) + for file in tqdm(filelist): + filename = file.key.split('/')[-1] + localfile = "" + # print('正在下载:', file.key) + if photoPath == "": + if filename.endswith('_1.jpg'): + localfile = os.path.join(path, 'photo1', filename) + else: + localfile = os.path.join(path, 'photo2', filename) + else: + if photoPath=="1": + if filename.endswith('_1.jpg'): + localfile = os.path.join(path, 'photo1', filename) + else: + if filename.endswith('_8.jpg'): + localfile = os.path.join(path, 'photo2', filename) + if localfile == "": + continue + style = f'image/resize,p_{per}' + if per == 100: + oss_client.get_object_to_file(file.key, localfile) + else: + oss_client.get_object_to_file(file.key, localfile, process=style) + + #判断localfile 是否有包含 photo2 + + + #遍历处理photo2的数数据 + # if str(psid) == "96" or str(pid) == "118994": + # path = os.path.join(workdir, pid, 'photo2') + # files = [] + # for fileName in os.listdir(path): + # if ".jpg" in fileName: + # files.append(path+"\\"+fileName) + # beginTime = time.time() + # with ThreadPoolExecutor(max_workers=6) as executor: + # executor.map(process_image, files) + + # print(f'{localfile}灰度处理费时{diff_time(beginTime)}') + + + print(f'{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())} pid: {pid} 图片下载完成, 共费时{diff_time(start_time)}') + +#灰度处理图片 +def process_image(localfile): + if ".jpg" in localfile: + common.remove_gray_and_sharpening(localfile) + +def get_defineDistances(psid): + res = '' + distances = libs_db.get_floor_sticker_distances(psid).split(';') + print("distances",distances) + for d in distances: + p1, p2, distance = d.split(' ') + res = res + f' -defineDistance {p1} {p2} {distance}' + return res.strip() + +def is_running(psname): + for p in psutil.process_iter(['name']): + if psname.strip() in p.info['name']: + return True + return False + +def diff_time(start_time): + # 按照分:秒的方式返回时间差 + end_time = time.time() + diff = end_time - start_time + m, s = divmod(diff, 60) + return f'{int(m)}分{int(s)}秒' + diff --git a/libs/libs_db.py b/libs/libs_db.py new file mode 100644 index 0000000..0bc98ef --- /dev/null +++ b/libs/libs_db.py @@ -0,0 +1,3 @@ +# mysql数据库常用任务函数封装 +import socket, time +import config \ No newline at end of file diff --git a/libs/main_service_db.py b/libs/main_service_db.py new file mode 100644 index 0000000..e5eda4a --- /dev/null +++ b/libs/main_service_db.py @@ -0,0 +1,205 @@ +# mysql数据库常用任务函数封装 +import pymysql, socket, time +import config +import logging +logging.basicConfig(filename='task_distributed_error.log', level=logging.ERROR) +#公共连接库 +def pymysqlAlias(): + return pymysql.connect( + host=config.mysql_local['host'], + port=config.mysql_local['port'], + user=config.mysql_local['user'], + password=config.mysql_local['password'], + db=config.mysql_local['db'], + charset=config.mysql_local['charset'],) + +#查询 task_distributed +def db_task_distributed(where): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor(pymysql.cursors.DictCursor) + sql = 'select * from task_distributed where 1=1' + if where: + sql += f' and {where}' + + cursor.execute(sql) + result = cursor.fetchone() + # 关闭游标和连接 + ##cursor.close() + #conn.close() + return result + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed()异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed()异常: {str(e)}") + return 'error' + +def db_task_distributed_list(where): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor(pymysql.cursors.DictCursor) + sql = 
'select * from task_distributed where 1=1' + if where: + sql += f' and {where}' + + cursor.execute(sql) + result = cursor.fetchall() + # 关闭游标和连接 + ##cursor.close() + #conn.close() + return result + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed_list()异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed_list()异常: {str(e)}") + return 'error' + +#查询 task_distributed_detail +def db_task_distributed_detail(where): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor(pymysql.cursors.DictCursor) + sql = 'select * from task_distributed_detail where 1=1' + if where: + sql += f' and {where}' + + cursor.execute(sql) + result = cursor.fetchone() + # 关闭游标和连接 + #cursor.close() + #conn.close() + return result + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed_detail()异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed_detail()异常: {str(e)}") + return 'error' + +#查询指定条件下的数量 task_distributed_detail +def db_task_distributed_detail_count(where): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor(pymysql.cursors.DictCursor) + sql = 'select count(*) as nums from task_distributed_detail where 1=1' + if where: + sql += f' and {where}' + + cursor.execute(sql) + result = cursor.fetchone() + # 关闭游标和连接 + #cursor.close() + #conn.close() + return result["nums"] + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed_detail_count()异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行db_task_distributed_detail_count()异常: {str(e)}") + return 'error' + + +# 在task_distributed_detail插入明细步骤 +def add_task_distributed_detail(data): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor() + sql = f'insert into task_distributed_detail (task_distributed_id,step,hostname,started_at) values ("{data["task_distributed_id"]}", "{data["step"]}","{data["hostname"]}","{time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())}")' + cursor.execute(sql) + conn.commit() + return "ok" + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行add_task_distributed_detail({data})异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行add_task_distributed_detail({data})异常: {str(e)}") + return "error" + +# 更新 task_distributed 主表 +def update_task_distributed(data): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor() + + sql = f'update task_distributed set ' + #判断要更新哪些字段 + if "status" in data: + sql += f'status = "{data["status"]}",' + + if "hostname" in data: + sql += f'hostname = "{data["hostname"]}",' + + if "step_last" in data: + sql += f'step_last = "{data["step_last"]}",' + + if "priority" in data: + sql += f'priority = "{data["priority"]}",' + + if "started_at" in data: + sql += f'started_at = "{data["started_at"]}",' + + if "finished_at" in data: + sql += f'finished_at = "{data["finished_at"]}",' + + + #去掉 sql 最右边的逗号 + sql = sql.rstrip(',') + + + sql += f' where 1=1 ' + #条件要放在最后面 + if "id" in data: + sql += f' and id = "{data["id"]}"' + + if "task_key" in data: + sql += f' and task_type = "{data["task_key"]}" and status != 2' + + #sql = f'update task_distributed set status = "{data["status"]}",updated_at = "{now()}" where id = "{data["id"]}"' + # print(f'sql: {sql}') + cursor.execute(sql) + conn.commit() + except 
Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行update_task_distributed({data})异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行update_task_distributed({data})异常: {str(e)}") + return "error" + +# 更新 task_distributed_detail 主表 +def update_task_distributed_detail(data): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor() + + sql = f'update task_distributed_detail set ' + #判断要更新哪些字段 + if "finished_at" in data: + sql += f'finished_at = "{data["finished_at"]}"' + + if "step" in data: + sql += f',step = "{data["step"]}"' + + if "hostname" in data: + sql += f',hostname = "{data["hostname"]}"' + + #where 条件 + sql += f' where 1=1 ' + if "task_distributed_id" in data: + sql += f' and task_distributed_id = "{data["task_distributed_id"]}"' + + if "step" in data: + sql += f' and step = "{data["step"]}"' + + cursor.execute(sql) + conn.commit() + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行update_task_distributed_detail({data})异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行update_task_distributed_detail({data})异常: {str(e)}") + return "error" + +#获取需要执行step1的任务 +def get_task_distributed_step1(): + try: + with pymysqlAlias() as conn: + cursor = conn.cursor(pymysql.cursors.DictCursor) + sql = 'select * from task_distributed where status =0 order by priority desc limit 1 for update' + cursor.execute(sql) + result = cursor.fetchone() + # 关闭游标和连接 + ##cursor.close() + #conn.close() + return result + except Exception as e: + print(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行get_task_distributed_step1()异常: {str(e)}") + logging.error(f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())} 执行get_task_distributed_step1()异常: {str(e)}") + return 'error' \ No newline at end of file diff --git a/testScript.py b/testScript.py new file mode 100644 index 0000000..a5fb402 --- /dev/null +++ b/testScript.py @@ -0,0 +1,23 @@ +import os + +def check_subdirectories(directory): + subdirectories = [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))] + if len(subdirectories) >= 2: + #print(f"Directory '{directory}' contains {len(subdirectories)} subdirectories.") + return True + else: + print(f"Directory '{directory}' does not contain enough subdirectories.") + return False + +def main(): + target_directory = "E://complate\objs" # 替换为您要检查的目标目录的路径 + if os.path.isdir(target_directory): + for item in os.listdir(target_directory): + item_path = os.path.join(target_directory, item) + if os.path.isdir(item_path): + check_subdirectories(item_path) + else: + print("Invalid directory path.") + +if __name__ == "__main__": + main() diff --git a/timer/timer_to_check.py b/timer/timer_to_check.py new file mode 100644 index 0000000..036b6c4 --- /dev/null +++ b/timer/timer_to_check.py @@ -0,0 +1,4 @@ +#检测 /data/datasets/complate/objs 中 对应的 + +# 1. 检测数据库有哪些脚底板没有处理的要及时处理一下,超过 2 个小时的 +# 2. 检测当前待打印的有哪些没有构建完成obj 的 及时构建一下, 超过 2 个小时 \ No newline at end of file
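Note: timer/timer_to_check.py is committed with the two TODO comments above and no implementation. Below is a minimal sketch of what such a periodic check could look like, assuming it reuses db_task_distributed_list() from libs/main_service_db.py and the notify() helper from the common module; the two-hour threshold comes from the TODO comments, while the status value (1 = running) and the exact WHERE clause are assumptions, not part of the commit.

import time
import main_service_db
import common

STALE_SECONDS = 2 * 60 * 60  # "more than 2 hours" from the TODO comments above

def check_stale_tasks():
    # Hypothetical query: tasks still marked as running (status=1 is an assumed value)
    # whose started_at timestamp is more than two hours old.
    cutoff = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time() - STALE_SECONDS))
    rows = main_service_db.db_task_distributed_list(f'status = 1 and started_at < "{cutoff}"')
    if rows == 'error' or not rows:
        return
    for row in rows:
        # notify() posts a WeChat Work message to the users in config.notify_user_Ids
        common.notify(f"task {row['id']} ({row['task_type']}) has been running for over 2 hours")

if __name__ == '__main__':
    while True:
        check_stale_tasks()
        time.sleep(600)  # poll every 10 minutes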